prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>0003_submission_ques_id.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-06 05:21
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('app', '0002_auto_20170105_0206'),
]
operations = [
migrations.AddField(<|fim▁hole|> ),
]<|fim▁end|> | model_name='submission',
name='ques_ID',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='app.Question'),
preserve_default=False, |
<|file_name|>groups.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .base import Processor, NotConfiguredException
from googleapiclient import discovery
from google.oauth2.credentials import Credentials
from urllib.parse import urlencode
import re
class GroupsProcessor(Processor):
def process(self, config_key=None):
if config_key is None:
config_key = 'groups'
if config_key not in self.config:
raise NotConfiguredException(
'No Cloud Identity groups configuration specified in config!')
groups_config = self.config[config_key]
service_account = groups_config[
'serviceAccountEmail'] if 'serviceAccountEmail' in groups_config else None
group_credentials = Credentials(
self.get_token_for_scopes([
'https://www.googleapis.com/auth/cloud-identity.groups.readonly'
],
service_account=service_account))
group_service = discovery.build(
'cloudidentity',
'v1',
http=self._get_branded_http(group_credentials))
query = groups_config['query'] if 'query' in groups_config else ""
query_template = self.jinja_environment.from_string(query)
query_template.name = 'query'
query_output = query_template.render()
page_token = None
all_groups = {}
groups_by_owner = {}
groups_by_manager = {}
group_filter = None
if 'filter' in groups_config:
group_filter_template = self.jinja_environment.from_string(
groups_config['filter'])
group_filter_template.name = 'group_filter'
group_filter_output = group_filter_template.render()
group_filter = re.compile(group_filter_output)
while True:
search_query = urlencode({"query": query_output})
search_group_request = group_service.groups().search(
pageToken=page_token, pageSize=1, view="FULL")
param = "&" + search_query
search_group_request.uri += param<|fim▁hole|> response = search_group_request.execute()
if 'groups' in response:
for group in response['groups']:
group_key = group['groupKey']['id']
if group_filter:
if not group_filter.match(group_key):
continue
group['owners'] = []
group['managers'] = []
membership_page_token = None
while True:
membership_request = group_service.groups().memberships(
).list(parent=group['name'],
pageToken=membership_page_token)
membership_response = membership_request.execute()
group['memberships'] = {}
if 'memberships' in membership_response:
owners = []
managers = []
for member in membership_response['memberships']:
member_key = member['preferredMemberKey']['id']
group['memberships'][member_key] = member
if 'roles' in member:
for role in member['roles']:
if role['name'] == 'OWNER':
owners.append(member_key)
group['owners'].append(member_key)
if role['name'] == 'MANAGER':
managers.append(member_key)
group['managers'].append(member_key)
for owner in owners:
if owner not in groups_by_owner:
groups_by_owner[owner] = [group]
else:
groups_by_owner[owner].append(group)
for manager in managers:
if manager not in groups_by_manager:
groups_by_manager[manager] = [group]
else:
groups_by_manager[manager].append(group)
if 'nextPageToken' in membership_response:
membership_page_token = membership_response[
'nextPageToken']
else:
break
all_groups[group_key] = group
if 'nextPageToken' in response:
page_token = response['nextPageToken']
else:
break
res = {
'all_groups': all_groups,
'groups_by_owner': groups_by_owner,
'groups_by_manager': groups_by_manager
}
return res<|fim▁end|> | |
<|file_name|>Channel.cpp<|end_file_name|><|fim▁begin|>/**
* Channel.cpp
* This file is part of the YATE Project http://YATE.null.ro
*
* Yet Another Telephony Engine - a fully featured software PBX and IVR
* Copyright (C) 2004-2013 Null Team
*
* This software is distributed under multiple licenses;
* see the COPYING file in the main directory for licensing
* information for this specific distribution.
*
* This use of this software may be subject to additional restrictions.
* See the LEGAL file in the main directory for details.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
*/
#include "yatephone.h"
#include <string.h>
#include <stdlib.h>
using namespace TelEngine;
// Find if a string appears to be an E164 phone number
bool TelEngine::isE164(const char* str)
{
if (!str)
return false;
// an initial + character is ok, we skip it
if (*str == '+')
str++;
// at least one valid character is required
if (!*str)
return false;
for (;;) {
switch (*str++) {
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '*':
case '#':
break;
case '\0':
return true;
default:
return false;
}
}
}
static unsigned int s_callid = 0;
static Mutex s_callidMutex(false,"CallID");
// this is to protect against two threads trying to (dis)connect a pair
// of call endpoints at the same time
static Mutex s_mutex(true,"CallEndpoint");
static const String s_audioType = "audio";
static const String s_copyParams = "copyparams";
CallEndpoint::CallEndpoint(const char* id)
: m_peer(0), m_id(id), m_mutex(0)
{
}
void CallEndpoint::destroyed()
{
#ifdef DEBUG
ObjList* l = m_data.skipNull();
for (; l; l=l->skipNext()) {
DataEndpoint* e = static_cast<DataEndpoint*>(l->get());
Debug(DebugAll,"Endpoint at %p type '%s' refcount=%d",e,e->name().c_str(),e->refcount());
}
#endif
disconnect(true,0,true,0);
clearEndpoint();
}
Mutex& CallEndpoint::commonMutex()
{
return s_mutex;
}
void* CallEndpoint::getObject(const String& name) const
{
if (name == YATOM("CallEndpoint"))
return const_cast<CallEndpoint*>(this);
return RefObject::getObject(name);
}
void CallEndpoint::setId(const char* newId)
{
m_id = newId;
}
bool CallEndpoint::connect(CallEndpoint* peer, const char* reason, bool notify)
{
if (!peer) {
disconnect(reason,notify);
return false;
}
if (peer == m_peer)
return true;
if (peer == this) {
Debug(DebugWarn,"CallEndpoint '%s' trying to connect to itself! [%p]",m_id.c_str(),this);
return false;
}
DDebug(DebugAll,"CallEndpoint '%s' connecting peer %p to [%p]",m_id.c_str(),peer,this);
#if 0
Lock lock(s_mutex,5000000);
if (!lock.locked()) {
Alarm("engine","bug",DebugFail,"Call connect failed - timeout on call endpoint mutex owned by '%s'!",s_mutex.owner());
Engine::restart(0);
return false;
}
#endif
// are we already dead?
if (!ref())
return false;
disconnect(reason,notify);
// is our intended peer dead?
if (!peer->ref()) {
deref();
return false;
}
peer->disconnect(reason,notify);
ObjList* l = m_data.skipNull();
for (; l; l=l->skipNext()) {
DataEndpoint* e = static_cast<DataEndpoint*>(l->get());
e->connect(peer->getEndpoint(e->name()));
}
m_peer = peer;
peer->setPeer(this,reason,notify);
setDisconnect(0);
connected(reason);
return true;
}
bool CallEndpoint::disconnect(bool final, const char* reason, bool notify, const NamedList* params)
{
if (!m_peer)
return false;
DDebug(DebugAll,"CallEndpoint '%s' disconnecting peer %p from [%p]",m_id.c_str(),m_peer,this);
Lock lock(s_mutex,5000000);
if (!lock.locked()) {
Alarm("engine","bug",DebugFail,"Call disconnect failed - timeout on call endpoint mutex owned by '%s'!",s_mutex.owner());
Engine::restart(0);
return false;
}
CallEndpoint *temp = m_peer;
m_peer = 0;
if (!temp)
return false;
ObjList* l = m_data.skipNull();
for (; l; l=l->skipNext()) {
DataEndpoint* e = static_cast<DataEndpoint*>(l->get());
DDebug(DebugAll,"Endpoint at %p type '%s' peer %p",e,e->name().c_str(),e->getPeer());
e->disconnect();
}
temp->setPeer(0,reason,notify,params);
if (final)
disconnected(true,reason);
lock.drop();
temp->deref();
return deref();
}
void CallEndpoint::setPeer(CallEndpoint* peer, const char* reason, bool notify, const NamedList* params)
{
m_peer = peer;
if (m_peer) {
setDisconnect(0);
connected(reason);
}
else if (notify) {
setDisconnect(params);
disconnected(false,reason);
}
}
bool CallEndpoint::getPeerId(String& id) const
{
id.clear();
if (!m_peer)
return false;
Lock lock(s_mutex,5000000);
if (!lock.locked()) {
Alarm("engine","bug",DebugFail,"Peer ID failed - timeout on call endpoint mutex owned by '%s'!",s_mutex.owner());
Engine::restart(0);
return false;
}
if (m_peer) {
id = m_peer->id();
return true;
}
else
return false;
}
String CallEndpoint::getPeerId() const
{
String id;
getPeerId(id);
return id;
}
DataEndpoint* CallEndpoint::getEndpoint(const String& type) const
{
if (type.null())
return 0;
const ObjList* pos = m_data.find(type);
return pos ? static_cast<DataEndpoint*>(pos->get()) : 0;
}
DataEndpoint* CallEndpoint::setEndpoint(const String& type)
{
if (type.null())
return 0;
DataEndpoint* dat = getEndpoint(type);
if (!dat) {
dat = new DataEndpoint(this,type);
if (m_peer)
dat->connect(m_peer->getEndpoint(type));
}
return dat;
}
void CallEndpoint::setEndpoint(DataEndpoint* endPoint)
{
if (!(endPoint && endPoint->ref()))
return;
if (m_data.find(endPoint)) {
endPoint->deref();
return;
}
clearEndpoint(endPoint->toString());
endPoint->disconnect();
m_data.append(endPoint);
if (m_peer)
endPoint->connect(m_peer->getEndpoint(endPoint->toString()));
}
void CallEndpoint::clearEndpoint(const String& type)
{
if (type.null()) {
ObjList* l = m_data.skipNull();
for (; l; l=l->skipNext()) {
DataEndpoint* e = static_cast<DataEndpoint*>(l->get());
DDebug(DebugAll,"Endpoint at %p type '%s' peer %p",e,e->name().c_str(),e->getPeer());
e->disconnect();
e->clearCall(this);
}
m_data.clear();
}
else {
DataEndpoint* dat = getEndpoint(type);
if (dat) {
m_data.remove(dat,false);
dat->disconnect();
dat->clearCall(this);
dat->destruct();
}
}
}
void CallEndpoint::setSource(DataSource* source, const String& type)
{
DataEndpoint* dat = source ? setEndpoint(type) : getEndpoint(type);
if (dat)
dat->setSource(source);
}
DataSource* CallEndpoint::getSource(const String& type) const
{
DataEndpoint* dat = getEndpoint(type);
return dat ? dat->getSource() : 0;
}
void CallEndpoint::setConsumer(DataConsumer* consumer, const String& type)
{
DataEndpoint* dat = consumer ? setEndpoint(type) : getEndpoint(type);
if (dat)
dat->setConsumer(consumer);
}
DataConsumer* CallEndpoint::getConsumer(const String& type) const
{
DataEndpoint* dat = getEndpoint(type);
return dat ? dat->getConsumer() : 0;
}
bool CallEndpoint::clearData(DataNode* node, const String& type)
{
if (type.null() || !node)
return false;
Lock mylock(DataEndpoint::commonMutex());
RefPointer<DataEndpoint> dat = getEndpoint(type);
return dat && dat->clearData(node);
}
const String& CallEndpoint::audioType()
{
return s_audioType;
}
static const String s_disconnected("chan.disconnected");
// Mutex used to lock disconnect parameters during access
static Mutex s_paramMutex(true,"ChannelParams");
Channel::Channel(Driver* driver, const char* id, bool outgoing)
: CallEndpoint(id),
m_parameters(""), m_driver(driver), m_outgoing(outgoing),
m_timeout(0), m_maxcall(0), m_maxPDD(0), m_dtmfTime(0),
m_toutAns(0), m_dtmfSeq(0), m_answered(false)
{
init();
}
Channel::Channel(Driver& driver, const char* id, bool outgoing)
: CallEndpoint(id),
m_parameters(""), m_driver(&driver), m_outgoing(outgoing),
m_timeout(0), m_maxcall(0), m_maxPDD(0), m_dtmfTime(0),
m_toutAns(0), m_dtmfSeq(0), m_answered(false)
{
init();
}
Channel::~Channel()
{
#ifdef DEBUG
Debugger debug(DebugAll,"Channel::~Channel()"," '%s' [%p]",id().c_str(),this);
#endif
cleanup();
}
void* Channel::getObject(const String& name) const
{
if (name == YATOM("Channel"))
return const_cast<Channel*>(this);
if (name == YATOM("MessageNotifier"))
return static_cast<MessageNotifier*>(const_cast<Channel*>(this));
return CallEndpoint::getObject(name);
}
Mutex& Channel::paramMutex()
{
return s_paramMutex;
}
void Channel::init()
{
status(direction());
m_mutex = m_driver;
if (m_driver) {
m_driver->lock();
debugName(m_driver->debugName());
debugChain(m_driver);
if (id().null()) {
String tmp(m_driver->prefix());
tmp << m_driver->nextid();
setId(tmp);
}
m_driver->unlock();
}
// assign a new billid only to incoming calls
if (m_billid.null() && !m_outgoing)
m_billid << Engine::runId() << "-" << allocId();
DDebug(this,DebugInfo,"Channel::init() '%s' [%p]",id().c_str(),this);
}
void Channel::cleanup()
{
m_timeout = 0;
m_maxcall = 0;
m_maxPDD = 0;
status("deleted");
m_targetid.clear();
dropChan();
m_driver = 0;
m_mutex = 0;
}
void Channel::filterDebug(const String& item)
{
if (m_driver) {
if (m_driver->filterInstalled())
debugEnabled(m_driver->filterDebug(item));
else
debugChain(m_driver);
}
}
void Channel::initChan()
{
if (!m_driver)
return;
Lock mylock(m_driver);
#ifndef NDEBUG
if (m_driver->channels().find(this)) {
Debug(DebugGoOn,"Channel '%s' already in list of '%s' driver [%p]",
id().c_str(),m_driver->name().c_str(),this);
return;
}
#endif
m_driver->m_total++;
m_driver->m_chanCount++;
m_driver->channels().append(this);
m_driver->changed();
}
void Channel::dropChan()
{
if (!m_driver)
return;
m_driver->lock();
if (!m_driver)
Debug(DebugFail,"Driver lost in dropChan! [%p]",this);
if (m_driver->channels().remove(this,false)) {
if (m_driver->m_chanCount > 0)
m_driver->m_chanCount--;
m_driver->changed();
}
m_driver->unlock();
}
void Channel::zeroRefs()
{
// remove us from driver's list before calling the destructor
dropChan();
CallEndpoint::zeroRefs();
}
void Channel::connected(const char* reason)
{
CallEndpoint::connected(reason);
if (m_billid.null()) {
Channel* peer = YOBJECT(Channel,getPeer());
if (peer && peer->billid())
m_billid = peer->billid();
}
Message* m = message("chan.connected",false,true);
if (reason)
m->setParam("reason",reason);
if (!Engine::enqueue(m))
TelEngine::destruct(m);
getPeerId(m_lastPeerId);
}
void Channel::disconnected(bool final, const char* reason)
{
if (final || Engine::exiting())
return;
// last chance to get reconnected to something
Message* m = getDisconnect(reason);
s_paramMutex.lock();
m_targetid.clear();
m_parameters.clearParams();
s_paramMutex.unlock();
Engine::enqueue(m);
}
void Channel::setDisconnect(const NamedList* params)
{
DDebug(this,DebugInfo,"setDisconnect(%p) [%p]",params,this);
s_paramMutex.lock();
m_parameters.clearParams();
if (params)
m_parameters.copyParams(*params);
s_paramMutex.unlock();
}
void Channel::endDisconnect(const Message& msg, bool handled)
{
}
void Channel::dispatched(const Message& msg, bool handled)
{
if (s_disconnected == msg)
endDisconnect(msg,handled);
}
void Channel::setId(const char* newId)
{
debugName(0);
CallEndpoint::setId(newId);
debugName(id());
}
Message* Channel::getDisconnect(const char* reason)
{
Message* msg = new Message(s_disconnected);
s_paramMutex.lock();
msg->copyParams(m_parameters);
s_paramMutex.unlock();
complete(*msg);
if (reason)
msg->setParam("reason",reason);
// we will remain referenced until the message is destroyed
msg->userData(this);
msg->setNotify();
return msg;
}
void Channel::status(const char* newstat)
{
Lock lock(mutex());
m_status = newstat;
if (!m_answered && (m_status == YSTRING("answered"))) {
m_answered = true;
// stop pre-answer timeout, restart answered timeout
m_maxcall = 0;
maxPDD(0);
if (m_toutAns)
timeout(Time::now() + m_toutAns*(u_int64_t)1000);
}
else if (m_status == YSTRING("ringing") || m_status == YSTRING("progressing"))
maxPDD(0);
}
const char* Channel::direction() const
{
return m_outgoing ? "outgoing" : "incoming";
}
void Channel::setMaxcall(const Message* msg, int defTout)
{
int tout = msg ? msg->getIntValue(YSTRING("timeout"),defTout) : defTout;
if (tout > 0) {
m_toutAns = tout;
timeout(Time::now() + tout*(u_int64_t)1000);
}
else if (tout == 0) {
m_toutAns = 0;
timeout(0);
}
if (m_answered)
maxcall(0);
else if (msg) {
tout = msg->getIntValue(YSTRING("maxcall"),-1);
if (tout > 0) {
timeout(0);
maxcall(Time::now() + tout*(u_int64_t)1000);
}
else if (tout == 0)
maxcall(0);
}
}
void Channel::setMaxPDD(const Message& msg)
{
if (m_answered) {
maxPDD(0);
return;
}
int tout = msg.getIntValue(YSTRING("maxpdd"),-1);
if (tout > 0)
maxPDD(Time::now() + tout * (u_int64_t)1000);
else if (tout == 0)
maxPDD(0);
}
void Channel::complete(Message& msg, bool minimal) const
{
static const String s_hangup("chan.hangup");
msg.setParam("id",id());
if (m_driver)
msg.setParam("module",m_driver->name());
if (s_hangup == msg) {
s_paramMutex.lock();
msg.copyParams(parameters());
s_paramMutex.unlock();
}
if (minimal)
return;
if (m_status)
msg.setParam("status",m_status);
if (m_address)
msg.setParam("address",m_address);
if (m_targetid)
msg.setParam("targetid",m_targetid);
if (m_billid)
msg.setParam("billid",m_billid);
String peer;
if (getPeerId(peer))
msg.setParam("peerid",peer);
if (m_lastPeerId)
msg.setParam("lastpeerid",m_lastPeerId);
msg.setParam("answered",String::boolText(m_answered));
msg.setParam("direction",direction());
}
Message* Channel::message(const char* name, bool minimal, bool data)
{
Message* msg = new Message(name);
if (data)
msg->userData(this);
complete(*msg,minimal);
return msg;
}
Message* Channel::message(const char* name, const NamedList* original, const char* params, bool minimal, bool data)
{
Message* msg = message(name,minimal,data);
if (original) {
if (!params)
params = original->getValue(s_copyParams);
if (!null(params))
msg->copyParams(*original,params);
}
return msg;
}
bool Channel::startRouter(Message* msg)
{
if (!msg)
return false;
if (m_driver) {
Router* r = new Router(m_driver,id(),msg);
if (r->startup())
return true;
delete r;
}
else
TelEngine::destruct(msg);
callRejected("failure","Internal server error");
// dereference and die if the channel is dynamic
if (m_driver && m_driver->varchan())
deref();
return false;
}
bool Channel::msgProgress(Message& msg)
{
status("progressing");
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
return true;
}
bool Channel::msgRinging(Message& msg)
{
status("ringing");
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
return true;
}
bool Channel::msgAnswered(Message& msg)
{
m_maxcall = 0;
int tout = msg.getIntValue(YSTRING("timeout"),m_toutAns);
m_toutAns = (tout > 0) ? tout : 0;
status("answered");
m_answered = true;
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
return true;
}
bool Channel::msgTone(Message& msg, const char* tone)
{
return false;
}
bool Channel::msgText(Message& msg, const char* text)
{
return false;
}
bool Channel::msgDrop(Message& msg, const char* reason)
{
m_timeout = m_maxcall = m_maxPDD = 0;
status(null(reason) ? "dropped" : reason);
disconnect(reason,msg);
return true;
}
bool Channel::msgTransfer(Message& msg)
{
return false;
}
bool Channel::msgUpdate(Message& msg)
{
return false;
}
bool Channel::msgMasquerade(Message& msg)
{
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
if (msg == YSTRING("call.answered")) {
Debug(this,DebugInfo,"Masquerading answer operation [%p]",this);
m_maxcall = 0;
maxPDD(0);
m_status = "answered";
}
else if (msg == YSTRING("call.progress")) {
Debug(this,DebugInfo,"Masquerading progress operation [%p]",this);
status("progressing");
}
else if (msg == YSTRING("call.ringing")) {
Debug(this,DebugInfo,"Masquerading ringing operation [%p]",this);
status("ringing");
}
else if (msg == YSTRING("chan.dtmf")) {
// add sequence, stop the message if it was a disallowed DTMF duplicate
if (dtmfSequence(msg) && m_driver && !m_driver->m_dtmfDups) {
Debug(this,DebugNote,"Stopping duplicate '%s' DTMF '%s' [%p]",
msg.getValue("detected"),msg.getValue("text"),this);
return true;
}
}
return false;
}
void Channel::msgStatus(Message& msg)
{
String par;
Lock lock(mutex());
complete(msg);
statusParams(par);
lock.drop();
msg.retValue().clear();
msg.retValue() << "name=" << id() << ",type=channel;" << par << "\r\n";
}
// Control message handler that is invoked only for messages to this channel
// Find a data endpoint to process it
bool Channel::msgControl(Message& msg)
{
setMaxcall(msg);
setMaxPDD(msg);
for (ObjList* o = m_data.skipNull(); o; o = o->skipNext()) {
DataEndpoint* dep = static_cast<DataEndpoint*>(o->get());
if (dep->control(msg))
return true;
}
return false;
}
void Channel::statusParams(String& str)
{
if (m_driver)
str.append("module=",",") << m_driver->name();
String peer;
if (getPeerId(peer))
str.append("peerid=",",") << peer;
str.append("status=",",") << m_status;
str << ",direction=" << direction();
str << ",answered=" << m_answered;
str << ",targetid=" << m_targetid;
str << ",address=" << m_address;
str << ",billid=" << m_billid;
if (m_timeout || m_maxcall || m_maxPDD) {
u_int64_t t = Time::now();
if (m_timeout) {
str << ",timeout=";
if (m_timeout > t)
str << (unsigned int)((m_timeout - t + 500) / 1000);
else
str << "expired";
}
if (m_maxcall) {
str << ",maxcall=";
if (m_maxcall > t)
str << (unsigned int)((m_maxcall - t + 500) / 1000);
else
str << "expired";
}
if (m_maxPDD) {
str << ",maxpdd=";
if (m_maxPDD > t)
str << (unsigned int)((m_maxPDD - t + 500) / 1000);
else
str << "expired";
}
}
}
void Channel::checkTimers(Message& msg, const Time& tmr)
{
if (timeout() && (timeout() < tmr))
msgDrop(msg,"timeout");
else if (maxcall() && (maxcall() < tmr))
msgDrop(msg,"noanswer");
else if (maxPDD() && (maxPDD() < tmr))
msgDrop(msg,"postdialdelay");
}
bool Channel::callPrerouted(Message& msg, bool handled)
{
status("prerouted");
// accept a new billid at this stage
String* str = msg.getParam(YSTRING("billid"));
if (str)
m_billid = *str;
return true;
}
bool Channel::callRouted(Message& msg)
{
status("routed");
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
return true;
}
void Channel::callAccept(Message& msg)
{
status("accepted");
int defTout = m_driver ? m_driver->timeout() : -1;
if (defTout <= 0)
defTout = -1;
setMaxcall(msg,defTout);
if (m_billid.null())
m_billid = msg.getValue(YSTRING("billid"));
m_targetid = msg.getValue(YSTRING("targetid"));
String detect = msg.getValue(YSTRING("tonedetect_in"));
if (detect && detect.toBoolean(true)) {
if (detect.toBoolean(false))
detect = "tone/*";
toneDetect(detect);
}
if (msg.getBoolValue(YSTRING("autoanswer")))
msgAnswered(msg);
else if (msg.getBoolValue(YSTRING("autoring")))
msgRinging(msg);
else if (msg.getBoolValue(YSTRING("autoprogress")))
msgProgress(msg);
else if (m_targetid.null() && msg.getBoolValue(YSTRING("autoanswer"),true)) {
// no preference exists in the message so issue a notice
Debug(this,DebugNote,"Answering now call %s because we have no targetid [%p]",
id().c_str(),this);
msgAnswered(msg);
}
}
void Channel::callConnect(Message& msg)
{
String detect = msg.getValue(YSTRING("tonedetect_out"));
if (detect && detect.toBoolean(true)) {
if (detect.toBoolean(false))
detect = "tone/*";
toneDetect(detect);
}
}
void Channel::callRejected(const char* error, const char* reason, const Message* msg)
{
Debug(this,DebugMild,"Call rejected error='%s' reason='%s' [%p]",error,reason,this);
if (msg) {
const String* cp = msg->getParam(s_copyParams);
if (!TelEngine::null(cp)) {
s_paramMutex.lock();
parameters().copyParams(*msg,*cp);
s_paramMutex.unlock();
}
}
status("rejected");
}
bool Channel::dtmfSequence(Message& msg)
{
if ((msg != YSTRING("chan.dtmf")) || msg.getParam(YSTRING("sequence")))
return false;
bool duplicate = false;
const String* detected = msg.getParam(YSTRING("detected"));
const String* text = msg.getParam(YSTRING("text"));
Lock lock(mutex());
unsigned int seq = m_dtmfSeq;
if (text && detected &&
(*text == m_dtmfText) && (*detected != m_dtmfDetected) &&
(msg.msgTime() < m_dtmfTime))
duplicate = true;
else {
seq = ++m_dtmfSeq;
m_dtmfTime = msg.msgTime() + 4000000;
m_dtmfText = text;
m_dtmfDetected = detected;
}
// need to add sequence number used to detect reorders
msg.addParam("sequence",String(seq));
msg.addParam("duplicate",String::boolText(duplicate));
return duplicate;
}
bool Channel::dtmfEnqueue(Message* msg)
{
if (!msg)
return false;
if (dtmfSequence(*msg) && m_driver && !m_driver->m_dtmfDups) {
Debug(this,DebugNote,"Dropping duplicate '%s' DTMF '%s' [%p]",
msg->getValue("detected"),msg->getValue("text"),this);
TelEngine::destruct(msg);
return false;
}
return Engine::enqueue(msg);
}
bool Channel::dtmfInband(const char* tone)
{
if (null(tone))
return false;
Message m("chan.attach");
complete(m,true);
m.userData(this);
String tmp("tone/dtmfstr/");
tmp += tone;
m.setParam("override",tmp);
m.setParam("single","yes");
return Engine::dispatch(m);
}
bool Channel::toneDetect(const char* sniffer)
{
if (null(sniffer))
sniffer = "tone/*";
Message m("chan.attach");
complete(m,true);
m.userData(this);
m.setParam("sniffer",sniffer);
m.setParam("single","yes");
return Engine::dispatch(m);
}
bool Channel::setDebug(Message& msg)
{
String str = msg.getValue("line");
if (str.startSkip("level")) {
int dbg = debugLevel();
str >> dbg;
debugLevel(dbg);
}
else if (str == "reset")
debugChain(m_driver);
else if (str == "engine")
debugCopy();
else if (str.isBoolean())
debugEnabled(str.toBoolean(debugEnabled()));
msg.retValue() << "Channel " << id()
<< " debug " << (debugEnabled() ? "on" : "off")
<< " level " << debugLevel() << (debugChained() ? " chained" : "") << "\r\n";
return true;
}
unsigned int Channel::allocId()
{
s_callidMutex.lock();
unsigned int id = ++s_callid;
s_callidMutex.unlock();
return id;
}
TokenDict Module::s_messages[] = {
{ "engine.status", Module::Status },
{ "engine.timer", Module::Timer },
{ "engine.debug", Module::Level },
{ "engine.command", Module::Command },
{ "engine.help", Module::Help },
{ "engine.halt", Module::Halt },
{ "call.route", Module::Route },
{ "call.execute", Module::Execute },
{ "call.drop", Module::Drop },
{ "call.progress", Module::Progress },
{ "call.ringing", Module::Ringing },
{ "call.answered", Module::Answered },
{ "call.update", Module::Update },
{ "chan.dtmf", Module::Tone },
{ "chan.text", Module::Text },
{ "chan.masquerade", Module::Masquerade },
{ "chan.locate", Module::Locate },
{ "chan.transfer", Module::Transfer },
{ "chan.control", Module::Control },
{ "msg.execute", Module::MsgExecute },
{ 0, 0 }
};
unsigned int Module::s_delay = 5;
const char* Module::messageName(int id)
{
if ((id <= 0) || (id >PubLast))
return 0;
return lookup(id,s_messages);
}
Module::Module(const char* name, const char* type, bool earlyInit)
: Plugin(name,earlyInit), Mutex(true,"Module"),
m_init(false), m_relays(0), m_type(type), m_changed(0)
{
}
Module::~Module()
{
}
void* Module::getObject(const String& name) const
{
if (name == YATOM("Module"))
return const_cast<Module*>(this);
return Plugin::getObject(name);
}
bool Module::installRelay(int id, const char* name, unsigned priority)
{
if (!(id && name && priority))
return false;
Lock lock(this);
if (m_relays & id)
return true;
m_relays |= id;
MessageRelay* relay = new MessageRelay(name,this,id,priority,Module::name());
m_relayList.append(relay)->setDelete(false);
Engine::install(relay);
return true;
}
bool Module::installRelay(int id, unsigned priority)
{
return installRelay(id,messageName(id),priority);
}
bool Module::installRelay(const char* name, unsigned priority)
{
return installRelay(lookup(name,s_messages),name,priority);
}
bool Module::installRelay(MessageRelay* relay)
{
if (!relay || ((relay->id() & m_relays) != 0) || m_relayList.find(relay))
return false;
m_relays |= relay->id();
m_relayList.append(relay)->setDelete(false);
Engine::install(relay);
return true;
}
bool Module::uninstallRelay(MessageRelay* relay, bool delRelay)
{
if (!relay || ((relay->id() & m_relays) == 0) || !m_relayList.remove(relay,false))
return false;
Engine::uninstall(relay);
m_relays &= ~relay->id();
if (delRelay)
TelEngine::destruct(relay);
return true;
}
bool Module::uninstallRelay(int id, bool delRelay)
{
if ((id & m_relays) == 0)
return false;
for (ObjList* l = m_relayList.skipNull(); l; l = l->skipNext()) {
MessageRelay* r = static_cast<MessageRelay*>(l->get());
if (r->id() != id)
continue;
Engine::uninstall(r);
m_relays &= ~id;
l->remove(delRelay);
break;
}
return false;
}
bool Module::uninstallRelays()
{
while (MessageRelay* relay = static_cast<MessageRelay*>(m_relayList.remove(false))) {
Engine::uninstall(relay);
m_relays &= ~relay->id();
relay->destruct();
}
return (0 == m_relays) && (0 == m_relayList.count());
}
void Module::initialize()
{
setup();
}
void Module::setup()
{
DDebug(this,DebugAll,"Module::setup()");
if (m_init)
return;
m_init = true;
installRelay(Timer,90);
installRelay(Status,110);
installRelay(Level,120);
installRelay(Command,120);
}
void Module::changed()
{
if (s_delay && !m_changed)
m_changed = Time::now() + s_delay*(u_int64_t)1000000;
}
void Module::msgTimer(Message& msg)
{
if (m_changed && (msg.msgTime() > m_changed)) {
Message* m = new Message("module.update");
m->addParam("module",name());
m_changed = 0;
genUpdate(*m);
Engine::enqueue(m);
}
}
bool Module::msgRoute(Message& msg)
{
return false;
}
bool Module::msgCommand(Message& msg)
{
const NamedString* line = msg.getParam(YSTRING("line"));
if (line)
return commandExecute(msg.retValue(),*line);
if (msg.getParam(YSTRING("partline")) || msg.getParam(YSTRING("partword")))
return commandComplete(msg,msg.getValue(YSTRING("partline")),msg.getValue(YSTRING("partword")));
return false;
}
bool Module::commandExecute(String& retVal, const String& line)
{
return false;
}
bool Module::commandComplete(Message& msg, const String& partLine, const String& partWord)
{
if ((partLine == YSTRING("debug")) || (partLine == YSTRING("status")))
itemComplete(msg.retValue(),name(),partWord);
return false;
}
bool Module::itemComplete(String& itemList, const String& item, const String& partWord)
{
if (partWord.null() || item.startsWith(partWord)) {
itemList.append(item,"\t");
return true;
}
return false;
}
void Module::msgStatus(Message& msg)
{
String mod, par, det;
bool details = msg.getBoolValue(YSTRING("details"),true);
lock();
statusModule(mod);
statusParams(par);
if (details)
statusDetail(det);
unlock();
msg.retValue() << mod << ";" << par;
if (det)
msg.retValue() << ";" << det;
msg.retValue() << "\r\n";
}
void Module::statusModule(String& str)
{
str.append("name=",",") << name();
if (m_type)
str << ",type=" << m_type;
}
void Module::statusParams(String& str)
{
}
void Module::statusDetail(String& str)
{
}
void Module::genUpdate(Message& msg)
{
}
bool Module::received(Message &msg, int id)
{
if (name().null())
return false;
switch (id) {
case Timer:
lock();
msgTimer(msg);
unlock();
return false;
case Route:
return msgRoute(msg);
}
String dest = msg.getValue(YSTRING("module"));
if (id == Status) {
if (dest == name()) {
msgStatus(msg);
return true;
}
if (dest.null() || (dest == m_type))
msgStatus(msg);
return false;
}
else if (id == Level)
return setDebug(msg,dest);
else if (id == Command)
return msgCommand(msg);
return false;
}
bool Module::setDebug(Message& msg, const String& target)
{
if (target != name())
return false;
String str = msg.getValue("line");
if (str.startSkip("level")) {
int dbg = debugLevel();
str >> dbg;
debugLevel(dbg);
}
else if (str == "reset") {
debugLevel(TelEngine::debugLevel());
debugEnabled(true);
}
else if (str.startSkip("filter"))
m_filter = str;
else {
bool dbg = debugEnabled();
str >> dbg;
debugEnabled(dbg);
}
msg.retValue() << "Module " << name()
<< " debug " << (debugEnabled() ? "on" : "off")
<< " level " << debugLevel();
if (m_filter)
msg.retValue() << " filter: " << m_filter;
msg.retValue() << "\r\n";
return true;
}
bool Module::filterDebug(const String& item) const
{
return m_filter.null() ? debugEnabled() : m_filter.matches(item);
}
Driver::Driver(const char* name, const char* type)
: Module(name,type),
m_init(false), m_varchan(true),
m_routing(0), m_routed(0), m_total(0),
m_nextid(0), m_timeout(0),
m_maxroute(0), m_maxchans(0), m_chanCount(0), m_dtmfDups(false)
{
m_prefix << name << "/";
}
void* Driver::getObject(const String& name) const
{
if (name == YATOM("Driver"))
return const_cast<Driver*>(this);
return Module::getObject(name);
}
void Driver::initialize()
{
setup();
}
void Driver::setup(const char* prefix, bool minimal)
{
DDebug(this,DebugAll,"Driver::setup('%s',%d)",prefix,minimal);
Module::setup();
loadLimits();
if (m_init)
return;
m_init = true;
m_prefix = prefix ? prefix : name().c_str();
if (m_prefix && !m_prefix.endsWith("/"))
m_prefix += "/";
XDebug(DebugAll,"setup name='%s' prefix='%s'",name().c_str(),m_prefix.c_str());
installRelay(Masquerade,10);
installRelay(Locate,40);
installRelay(Drop,60);
installRelay(Execute,90);
installRelay(Control,90);
if (minimal)
return;
installRelay(Tone);
installRelay(Text);
installRelay(Ringing);
installRelay(Answered);
}
bool Driver::isBusy() const
{
return (m_routing || m_chanCount);
}
Channel* Driver::find(const String& id) const
{
const ObjList* pos = m_chans.find(id);
return pos ? static_cast<Channel*>(pos->get()) : 0;
}
bool Driver::received(Message &msg, int id)
{
if (!m_prefix)
return false;
// pick destination depending on message type
String dest;
switch (id) {
case Timer:
{
// check each channel for timeouts
lock();
ListIterator iter(m_chans);
Time t;
for (;;) {
RefPointer<Channel> c = static_cast<Channel*>(iter.get());
unlock();
if (!c)
break;
c->checkTimers(msg,t);
c = 0;
lock();
}
}
case Status:
// check if it's a channel status request
dest = msg.getValue(YSTRING("module"));
if (dest.startsWith(m_prefix))
break;
case Level:
case Route:
case Command:
return Module::received(msg,id);
case Halt:
dropAll(msg);
return false;
case Execute:
dest = msg.getValue(YSTRING("callto"));
break;
case Drop:
case Masquerade:
case Locate:
dest = msg.getValue(YSTRING("id"));
break;
default:
dest = msg.getValue(YSTRING("peerid"));
// if this channel is not the peer, try to match it as target
if (!dest.startsWith(m_prefix))
dest = msg.getValue(YSTRING("targetid"));
break;
}
XDebug(DebugAll,"id=%d prefix='%s' dest='%s'",id,m_prefix.c_str(),dest.c_str());
if (id == Drop) {
bool exact = (dest == name());
if (exact || dest.null() || (dest == type())) {
dropAll(msg);
return exact;
}
}
// handle call.execute which should start a new channel
if (id == Execute) {
if (!canAccept(false))
return false;
if (dest.startSkip(m_prefix,false) ||
(dest.startSkip("line/",false) && hasLine(msg.getValue(YSTRING("line")))))
return msgExecute(msg,dest);
return false;
}
// check if the message was for this driver
if (!dest.startsWith(m_prefix))
return false;
lock();
RefPointer<Channel> chan = find(dest);
unlock();
if (!chan) {
DDebug(this,DebugMild,"Could not find channel '%s'",dest.c_str());
return false;
}
switch (id) {
case Status:
chan->msgStatus(msg);
return true;
case Progress:
return chan->isIncoming() && !chan->isAnswered() && chan->msgProgress(msg);
case Ringing:
return chan->isIncoming() && !chan->isAnswered() && chan->msgRinging(msg);
case Answered:
return chan->isIncoming() && !chan->isAnswered() && chan->msgAnswered(msg);
case Tone:
return chan->msgTone(msg,msg.getValue("text"));
case Text:
return chan->msgText(msg,msg.getValue("text"));
case Drop:
return chan->msgDrop(msg,msg.getValue("reason"));
case Transfer:
return chan->msgTransfer(msg);
case Update:
return chan->msgUpdate(msg);
case Masquerade:
msg = msg.getValue(YSTRING("message"));
msg.clearParam(YSTRING("message"));
msg.userData(chan);
if (chan->msgMasquerade(msg))
return true;
chan->complete(msg,msg.getBoolValue(YSTRING("complete_minimal"),false));
return false;
case Locate:
msg.userData(chan);
return true;
case Control:
return chan->msgControl(msg);
}
return false;
}
void Driver::dropAll(Message &msg)
{
const char* reason = msg.getValue(YSTRING("reason"));
lock();
ListIterator iter(m_chans);
for (;;) {
RefPointer<Channel> c = static_cast<Channel*>(iter.get());
unlock();
if (!c)
break;
DDebug(this,DebugAll,"Dropping %s channel '%s' @%p [%p]",
name().c_str(),c->id().c_str(),static_cast<Channel*>(c),this);
c->msgDrop(msg,reason);
c = 0;
lock();
}
}
bool Driver::canAccept(bool routers)
{
if (Engine::exiting())
return false;
if (routers && !canRoute())
return false;
if (m_maxchans)
return (m_chanCount < m_maxchans);
return true;
}
bool Driver::canRoute()
{
if (Engine::exiting())
return false;
if (m_maxroute && (m_routing >= m_maxroute))
return false;
return true;
}
bool Driver::hasLine(const String& line) const
{
return false;
}
bool Driver::msgRoute(Message& msg)
{
String called = msg.getValue(YSTRING("called"));
if (called.null())
return false;
String line = msg.getValue(YSTRING("line"));
if (line.null())
line = msg.getValue(YSTRING("account"));
if (line && hasLine(line)) {
// asked to route to a line we have locally
msg.setParam("line",line);
msg.retValue() = prefix() + called;
return true;
}
return Module::msgRoute(msg);
}
void Driver::genUpdate(Message& msg)
{
msg.addParam("routed",String(m_routed));
msg.addParam("routing",String(m_routing));
msg.addParam("total",String(m_total));
msg.addParam("chans",String(m_chanCount));
}
void Driver::statusModule(String& str)
{
Module::statusModule(str);
str.append("format=Status|Address|Peer",",");
}
void Driver::statusParams(String& str)
{
Module::statusParams(str);
str.append("routed=",",") << m_routed;
str << ",routing=" << m_routing;
str << ",total=" << m_total;
str << ",chans=" << m_chanCount;
}
void Driver::statusDetail(String& str)
{
ObjList* l = m_chans.skipNull();
for (; l; l=l->skipNext()) {
Channel* c = static_cast<Channel*>(l->get());
str.append(c->id(),",") << "=" << c->status() << "|" << c->address() << "|" << c->getPeerId();
}
}
bool Driver::commandComplete(Message& msg, const String& partLine, const String& partWord)
{
bool ok = false;
bool listChans = String(msg.getValue(YSTRING("complete"))) == YSTRING("channels");
if (listChans && (partWord.null() || name().startsWith(partWord)))
msg.retValue().append(name(),"\t");
else
ok = Module::commandComplete(msg,partLine,partWord);
lock();
unsigned int nchans = m_chans.count();
unlock();
if (nchans && listChans) {
if (name().startsWith(partWord)) {
msg.retValue().append(prefix(),"\t");
return ok;
}
if (partWord.startsWith(prefix()))
ok = true;
lock();
ObjList* l = m_chans.skipNull();
for (; l; l=l->skipNext()) {
Channel* c = static_cast<Channel*>(l->get());
if (c->id().startsWith(partWord))
msg.retValue().append(c->id(),"\t");
}
unlock();
}
return ok;
}
bool Driver::setDebug(Message& msg, const String& target)
{
if (!target.startsWith(m_prefix))
return Module::setDebug(msg,target);
Lock lock(this);
Channel* chan = find(target);
if (chan)
return chan->setDebug(msg);
return false;
}
void Driver::loadLimits()
{
timeout(Engine::config().getIntValue(YSTRING("telephony"),"timeout"));
maxRoute(Engine::config().getIntValue(YSTRING("telephony"),"maxroute"));
maxChans(Engine::config().getIntValue(YSTRING("telephony"),"maxchans"));
dtmfDups(Engine::config().getBoolValue(YSTRING("telephony"),"dtmfdups"));
}
unsigned int Driver::nextid()
{
Lock lock(this);
return ++m_nextid;
}
Router::Router(Driver* driver, const char* id, Message* msg)
: Thread("Call Router"), m_driver(driver), m_id(id), m_msg(msg)
{
}
void Router::run()<|fim▁hole|>{
if (!(m_driver && m_msg))
return;
m_driver->lock();
m_driver->m_routing++;
m_driver->changed();
m_driver->unlock();
bool ok = route();
m_driver->lock();
m_driver->m_routing--;
if (ok)
m_driver->m_routed++;
m_driver->changed();
m_driver->unlock();
}
bool Router::route()
{
DDebug(m_driver,DebugAll,"Routing thread for '%s' [%p]",m_id.c_str(),this);
RefPointer<Channel> chan;
String tmp(m_msg->getValue(YSTRING("callto")));
bool ok = !tmp.null();
if (ok)
m_msg->retValue() = tmp;
else {
if (*m_msg == YSTRING("call.preroute")) {
ok = Engine::dispatch(m_msg);
m_driver->lock();
chan = m_driver->find(m_id);
m_driver->unlock();
if (!chan) {
Debug(m_driver,DebugInfo,"Connection '%s' vanished while prerouting!",m_id.c_str());
return false;
}
const String* cp = m_msg->getParam(s_copyParams);
if (!TelEngine::null(cp)) {
Channel::paramMutex().lock();
chan->parameters().copyParams(*m_msg,*cp);
Channel::paramMutex().unlock();
}
bool dropCall = ok && ((m_msg->retValue() == YSTRING("-")) || (m_msg->retValue() == YSTRING("error")));
if (dropCall)
chan->callRejected(m_msg->getValue(YSTRING("error"),"unknown"),
m_msg->getValue(YSTRING("reason")),m_msg);
else
dropCall = !chan->callPrerouted(*m_msg,ok);
if (dropCall) {
// get rid of the dynamic chans
if (m_driver->varchan())
chan->deref();
return false;
}
chan = 0;
*m_msg = "call.route";
m_msg->retValue().clear();
}
ok = Engine::dispatch(m_msg);
}
m_driver->lock();
chan = m_driver->find(m_id);
m_driver->unlock();
if (!chan) {
Debug(m_driver,DebugInfo,"Connection '%s' vanished while routing!",m_id.c_str());
return false;
}
// chan will keep it referenced even if message user data is changed
m_msg->userData(chan);
static const char s_noroute[] = "noroute";
static const char s_looping[] = "looping";
static const char s_noconn[] = "noconn";
if (ok && m_msg->retValue().trimSpaces()) {
if ((m_msg->retValue() == YSTRING("-")) || (m_msg->retValue() == YSTRING("error")))
chan->callRejected(m_msg->getValue(YSTRING("error"),"unknown"),
m_msg->getValue("reason"),m_msg);
else if (m_msg->getIntValue(YSTRING("antiloop"),1) <= 0) {
const char* error = m_msg->getValue(YSTRING("error"),s_looping);
chan->callRejected(error,m_msg->getValue(YSTRING("reason"),
((s_looping == error) ? "Call is looping" : (const char*)0)),m_msg);
}
else if (chan->callRouted(*m_msg)) {
*m_msg = "call.execute";
m_msg->setParam("callto",m_msg->retValue());
m_msg->clearParam(YSTRING("error"));
m_msg->retValue().clear();
ok = Engine::dispatch(m_msg);
if (ok)
chan->callAccept(*m_msg);
else {
const char* error = m_msg->getValue(YSTRING("error"),s_noconn);
const char* reason = m_msg->getValue(YSTRING("reason"),
((s_noconn == error) ? "Could not connect to target" : (const char*)0));
Message m(s_disconnected);
const String* cp = m_msg->getParam(s_copyParams);
if (!TelEngine::null(cp))
m.copyParams(*m_msg,*cp);
chan->complete(m);
m.setParam("error",error);
m.setParam("reason",reason);
m.setParam("reroute",String::boolText(true));
m.userData(chan);
m.setNotify();
if (!Engine::dispatch(m))
chan->callRejected(error,reason,m_msg);
}
}
}
else {
const char* error = m_msg->getValue(YSTRING("error"),s_noroute);
chan->callRejected(error,m_msg->getValue(YSTRING("reason"),
((s_noroute == error) ? "No route to call target" : (const char*)0)),m_msg);
}
// dereference again if the channel is dynamic
if (m_driver->varchan())
chan->deref();
return ok;
}
void Router::cleanup()
{
destruct(m_msg);
}
/* vi: set ts=8 sw=4 sts=4 noet: */<|fim▁end|> | |
<|file_name|>seafile_uk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="uk" version="2.1">
<context>
<name>AccountManager</name>
<message>
<location filename="../src/account-mgr.cpp" line="99"/>
<source>failed to open account database</source>
<translation>Не вдалося відкрити базу даних облікових записів</translation>
</message>
<message>
<location filename="../src/account-mgr.cpp" line="493"/>
<source>Authorization expired, please re-login</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AccountSettingsDialog</name>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="18"/>
<source>Account Settings</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="53"/>
<source>Please enter the server address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="57"/>
<location filename="../src/ui/account-settings-dialog.cpp" line="63"/>
<source>%1 is not a valid server address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="83"/>
<source>Failed to save account information</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="91"/>
<source>Failed to save the changes: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-settings-dialog.cpp" line="96"/>
<source>Successfully updated current account information</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-settings-dialog.h" line="107"/>
<source>Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-settings-dialog.h" line="108"/>
<source>Server Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-settings-dialog.h" line="109"/>
<source>Email</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-settings-dialog.h" line="110"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-settings-dialog.h" line="111"/>
<source>Cancel</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AccountView</name>
<message>
<location filename="../src/ui/account-view.cpp" line="113"/>
<source>Are you sure to remove account from "%1"?<br><b>Warning: All libraries of this account would be unsynced!</b></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="124"/>
<source>Failed to unsync libraries of this account: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="150"/>
<source>click to open the website</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="158"/>
<source>pro version</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="160"/>
<source>No account</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="195"/>
<source>Choose</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="204"/>
<source>Account settings</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="219"/>
<source>Login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="222"/>
<source>Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="235"/>
<source>Add an account</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="217"/>
<source>Logout</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="182"/>
<location filename="../src/ui/account-view.cpp" line="248"/>
<source>not logged in</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="425"/>
<source>Failed to remove local repos sync token: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="439"/>
<source>Logging out is not supported on your server (version too low).</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="441"/>
<source>Failed to remove information on server: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/account-view.cpp" line="461"/>
<source>Failed to get repo sync information from server: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-view.h" line="82"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-view.h" line="83"/>
<source>Account</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-view.h" line="84"/>
<source>email</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_account-view.h" line="85"/>
<source>server</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ActivitiesTab</name>
<message>
<location filename="../src/ui/activities-tab.cpp" line="122"/>
<source>More</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/activities-tab.cpp" line="167"/>
<source>File Activities are only supported in Seafile Server Professional Edition.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/activities-tab.cpp" line="169"/>
<source>retry</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/activities-tab.cpp" line="170"/>
<source>Failed to get actvities information. Please %1</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AutoUpdateManager</name>
<message>
<location filename="../src/filebrowser/auto-update-mgr.cpp" line="150"/>
<source>Upload Success</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/auto-update-mgr.cpp" line="151"/>
<source>File "%1"
uploaded successfully.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/auto-update-mgr.cpp" line="159"/>
<source>Upload Failure</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/auto-update-mgr.cpp" line="160"/>
<source>File "%1"
failed to upload.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AvatarService</name>
<message>
<location filename="../src/avatar-service.cpp" line="158"/>
<source>Failed to create avatars folder</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CloneTasksDialog</name>
<message>
<location filename="../src/ui/clone-tasks-dialog.cpp" line="36"/>
<source>Download tasks</source>
<translation>Завдання завантаження</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-dialog.cpp" line="54"/>
<source>remove all successful tasks</source>
<translation>видалити всі успішні завдання</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-dialog.cpp" line="85"/>
<source>No download tasks right now.</source>
<translation>Зараз немає завдань завантаження.</translation>
</message>
<message>
<location filename="../ui_clone-tasks-dialog.h" line="69"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_clone-tasks-dialog.h" line="70"/>
<source>Clear</source>
<translation>Очистити</translation>
</message>
<message>
<location filename="../ui_clone-tasks-dialog.h" line="71"/>
<source>Close</source>
<translation>Закрити</translation>
</message>
</context>
<context>
<name>CloneTasksTableModel</name>
<message>
<location filename="../src/ui/clone-tasks-table-model.cpp" line="112"/>
<source>Library</source>
<translation>Бібліотека</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-table-model.cpp" line="119"/>
<source>Path</source>
<translation>Шлях</translation>
</message>
</context>
<context>
<name>CloneTasksTableView</name>
<message>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="82"/>
<source>Cancel this task</source>
<translation>Скасувати це завдання</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="84"/>
<source>cancel this task</source>
<translation>скасувати це завдання</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="89"/>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="91"/>
<source>Remove this task</source>
<translation>Видалити це завдання</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="103"/>
<source>Failed to cancel this task:
%1</source>
<translation>Не вдалося скасувати це завдання:
%1</translation>
</message>
<message>
<location filename="../src/ui/clone-tasks-table-view.cpp" line="114"/>
<source>Failed to remove this task:
%1</source>
<translation>Не вдалося видалити цю задачу:
%1</translation>
</message>
</context>
<context>
<name>CloudView</name>
<message>
<location filename="../src/ui/cloud-view.cpp" line="118"/>
<source>Minimize</source>
<translation>Згорнути</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="123"/>
<source>Close</source>
<translation>Закрити</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="150"/>
<source>Libraries</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="153"/>
<source>Starred</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="171"/>
<source>Activities</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="174"/>
<source>Search</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="201"/>
<source>current download rate</source>
<translation>поточна швидкість завантаження</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="207"/>
<source>current upload rate</source>
<translation>поточна швидкість віддачі</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="212"/>
<source>Please Choose a folder to sync</source>
<translation>Будь ласка, оберіть папку для синхронізації</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="350"/>
<source>no server connected</source>
<translation>немає підключеного серверу</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="348"/>
<source>all servers connected</source>
<translation>усі сервери підключено</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="352"/>
<source>some servers not connected</source>
<translation>деякі сервери не підключені</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="371"/>
<location filename="../src/ui/cloud-view.cpp" line="372"/>
<source>%1 kB/s</source>
<translation>%1 kB/s</translation>
</message>
<message>
<location filename="../src/ui/cloud-view.cpp" line="418"/>
<source>Refresh</source>
<translation>Оновити</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="231"/>
<source>Form</source>
<translation>Форма</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="232"/>
<source>logo</source>
<translation>лого</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="233"/>
<source>Seafile</source>
<translation>Seafile</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="234"/>
<source>minimize</source>
<translation>згорнути</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="235"/>
<source>close</source>
<translation>закрити</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="238"/>
<source>...</source>
<translation>...</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="236"/>
<source>Select</source>
<translation>Обрати</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="237"/>
<source>or Drop Folder to Sync</source>
<translation>чи Перетягніть каталог</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="239"/>
<source>download rate</source>
<translation>швидкість завантаження</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="240"/>
<source>downarrow</source>
<translation>стрілка вниз</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="241"/>
<source>upload rate</source>
<translation>швидкість віддачі</translation>
</message>
<message>
<location filename="../ui_cloud-view.h" line="242"/>
<source>uparrow</source>
<translation>стрілка вгору</translation>
</message>
</context>
<context>
<name>Configurator</name>
<message>
<location filename="../src/configurator.cpp" line="81"/>
<source>Error when creating ccnet configuration</source>
<translation>Помилка при створенні конфігурації ccnet</translation>
</message>
<message>
<location filename="../src/configurator.cpp" line="97"/>
<source>Unable to create preconfigure directory "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/configurator.cpp" line="214"/>
<source>failed to read %1</source>
<translation>не вдалося прочитати %1</translation>
</message>
<message>
<location filename="../src/configurator.cpp" line="259"/>
<source>%1 Default Library</source>
<translation>%1 Бібліотека за замовчанням</translation>
</message>
</context>
<context>
<name>CreateRepoDialog</name>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="31"/>
<source>Create a library</source>
<translation>Створення бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="59"/>
<source>Please choose a directory</source>
<translation>Будь ласка, оберіть каталог</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="90"/>
<source>Creating...</source>
<translation>Створення...</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="118"/>
<source>Please choose the directory to sync</source>
<translation>Будь ласка, оберіть каталог для синхронізації</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="122"/>
<source>The folder %1 does not exist</source>
<translation>Каталог %1 не існує</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="127"/>
<source>Please enter the name</source>
<translation>Будь ласка, введіть ім'я</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="134"/>
<source>Please enter the password</source>
<translation>Будь ласка, введіть пароль</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="141"/>
<source>Passwords don't match</source>
<translation>Паролі не збігаються</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="152"/>
<source>Unknown error</source>
<translation>Невідома помилка</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="186"/>
<source>Failed to add download task:
%1</source>
<translation>Не вдалося додати завдання завантаження:
%1</translation>
</message>
<message>
<location filename="../src/ui/create-repo-dialog.cpp" line="199"/>
<source>Failed to create library on the server:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="180"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="181"/>
<source>Path:</source>
<translation>Шлях:</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="182"/>
<source>Choose</source>
<translation>Вибрати</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="183"/>
<source>Name:</source>
<translation>Назва:</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="184"/>
<source>encrypted</source>
<translation>зашифровано</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="185"/>
<source>Password:</source>
<translation>Пароль:</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="186"/>
<source>Password Again:</source>
<translation>Пароль ще раз:</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="187"/>
<source>status text</source>
<translation>статус</translation>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="188"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_create-repo-dialog.h" line="189"/>
<source>Cancel</source>
<translation>Відміна</translation>
</message>
</context>
<context>
<name>DaemonManager</name>
<message>
<location filename="../src/daemon-mgr.cpp" line="63"/>
<source>failed to load ccnet config dir %1</source>
<translation>не вдалося завантажити конфігураційний каталог ccnet %1</translation>
</message>
</context>
<context>
<name>DownloadRepoDialog</name>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="229"/>
<location filename="../src/ui/download-repo-dialog.cpp" line="299"/>
<source>Please enter the password</source>
<translation>Будь ласка, введіть пароль</translation>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="83"/>
<source>Sync library "%1"</source>
<translation>Синхронізація бібліотеки "%1"</translation>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="86"/>
<source>Sync folder "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="93"/>
<source>Sync to folder:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="122"/>
<source>If a sub-folder with same name exists, its contents will be merged.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="150"/>
<source>or</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="155"/>
<source>sync with an existing folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="159"/>
<source>create a new sync folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="163"/>
<source>Sync with this existing folder:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="191"/>
<source>Please choose a folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="221"/>
<source>The folder does not exist</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="242"/>
<source>Please choose the folder to sync.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="249"/>
<source>Your organization disables putting a library outside %1 folder.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="266"/>
<source>Conflicting with existing file "%1", please choose a different folder.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="274"/>
<source>Conflicting with existing library "%1", please choose a different folder.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="279"/>
<source>The folder "%1" already exists. Are you sure to sync with it (contents will be merged)?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="280"/>
<source>Click No to sync with a new folder instead</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="288"/>
<source>Unable to find an alternative folder name</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="350"/>
<source>Failed to add download task:
%1</source>
<translation>Не вдалося додати завдання завантаження:
%1 </translation>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="361"/>
<source>Failed to get repo download information:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_download-repo-dialog.h" line="233"/>
<source>Download Library</source>
<translation>Завантажити бібліотеку</translation>
</message>
<message>
<location filename="../ui_download-repo-dialog.h" line="234"/>
<source>choose...</source>
<translation>обрати...</translation>
</message>
<message>
<location filename="../ui_download-repo-dialog.h" line="235"/>
<source>Password for this library:</source>
<translation>Пароль для цієї бібліотеки</translation>
</message>
<message>
<location filename="../ui_download-repo-dialog.h" line="236"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_download-repo-dialog.h" line="237"/>
<source>Cancel</source>
<translation>Скасувати</translation>
</message>
</context>
<context>
<name>EventDetailsDialog</name>
<message>
<location filename="../src/ui/event-details-dialog.cpp" line="36"/>
<source>Modification Details</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EventDetailsListModel</name>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="296"/>
<source>Added files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="297"/>
<source>Deleted files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="298"/>
<source>Modified files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="300"/>
<source>Added folders</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="301"/>
<source>Deleted folders</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EventDetailsListView</name>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="219"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="223"/>
<source>Open &parent folder</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileBrowserDialog</name>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="91"/>
<source>Cloud File Browser</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="274"/>
<source>Back</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="283"/>
<source>Forward</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="291"/>
<source>Home</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="324"/>
<source>Upload files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="330"/>
<source>Upload a directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="351"/>
<source>You don't have permission to upload files to this library</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="338"/>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="433"/>
<source>Create a folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="252"/>
<source>Minimize</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="259"/>
<source>Close</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="434"/>
<source>Folder name</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="442"/>
<source>Invalid folder name!</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="448"/>
<source>The name "%1" is already taken.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="464"/>
<source>retry</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="465"/>
<source>Failed to get files information<br/>Please %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="496"/>
<source>Enter name of file to save to...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="503"/>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="527"/>
<source>Unable to remove file "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="512"/>
<source>Enter the path of the folder you want to save to...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="523"/>
<source>Do you want to overwrite the existing file "%1"?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="604"/>
<source>File "%1" haven't been synced</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="641"/>
<source>Feature not supported</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="709"/>
<source>File %1 already exists.<br/>Do you like to overwrite it?<br/><small>(Choose No to upload using an alternative name).</small></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="752"/>
<source>Failed to download file: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="776"/>
<source>Failed to upload file: %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="907"/>
<source>Select a file to upload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="915"/>
<source>Select a directory to upload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="926"/>
<source>Unable to create cache folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="928"/>
<source>Unable to open cache folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="953"/>
<source>Rename</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="975"/>
<source>Do you really want to delete file "%1"?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="976"/>
<source>Do you really want to delete folder "%1"?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="983"/>
<source>Do you really want to delete these items</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1026"/>
<source>Create folder failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1053"/>
<source>Lock file failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1069"/>
<source>Select a file to update %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1077"/>
<source>Rename failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1091"/>
<source>Remove failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1101"/>
<source>Share failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1163"/>
<source>Cannot paste files from the same folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1187"/>
<source>Copy failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1202"/>
<source>Move failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="1230"/>
<source>Create library failed!</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileBrowserProgressDialog</name>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="28"/>
<source>Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="55"/>
<source>Upload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="56"/>
<source>Uploading %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="58"/>
<source>Download</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="59"/>
<source>Downloading %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/progress-dialog.cpp" line="89"/>
<source>%1 of %2</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileNetworkTask</name>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="128"/>
<source>Operation canceled</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="173"/>
<source>pending</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileServerTask</name>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="711"/>
<source>Internal Server Error</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileTableModel</name>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="876"/>
<source>locked by %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="924"/>
<source>Name</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="926"/>
<source>Size</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="928"/>
<source>Last Modified</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="930"/>
<source>Kind</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>FileTableView</name>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="279"/>
<location filename="../src/filebrowser/file-table.cpp" line="505"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="284"/>
<location filename="../src/filebrowser/file-table.cpp" line="482"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="289"/>
<location filename="../src/filebrowser/file-table.cpp" line="515"/>
<source>&Lock</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="297"/>
<source>&Rename</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="302"/>
<source>&Delete</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="307"/>
<source>&Generate Share Link</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="312"/>
<source>G&enerate Seafile Internal Link</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="321"/>
<source>&Update</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="325"/>
<source>&Copy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="329"/>
<source>Cu&t</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="333"/>
<source>&Paste</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="342"/>
<source>Canc&el Download</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="347"/>
<source>&Sync this folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="294"/>
<location filename="../src/filebrowser/file-table.cpp" line="353"/>
<source>This feature is available in pro version only
</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="457"/>
<source>&Save As To...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="459"/>
<location filename="../src/filebrowser/file-table.cpp" line="520"/>
<source>D&ownload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="510"/>
<source>Un&lock</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="635"/>
<source>Unable to remove readonly files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-table.cpp" line="717"/>
<source>Unable to cut readonly files</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>GetFileTask</name>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="390"/>
<location filename="../src/filebrowser/tasks.cpp" line="433"/>
<source>Failed to create folders</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="399"/>
<source>Failed to create temporary files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="448"/>
<source>Failed to write file to disk</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="454"/>
<source>Failed to remove the older version of the downloaded file</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="460"/>
<source>Failed to move file</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>InitSeafileDialog</name>
<message>
<location filename="../src/ui/init-seafile-dialog.cpp" line="103"/>
<source>%1 Initialization</source>
<translation>%1 ініціалізація</translation>
</message>
<message>
<location filename="../src/ui/init-seafile-dialog.cpp" line="141"/>
<location filename="../src/ui/init-seafile-dialog.cpp" line="156"/>
<source>Please choose a directory</source>
<translation>Будь ласка, оберіть каталог</translation>
</message>
<message>
<location filename="../src/ui/init-seafile-dialog.cpp" line="184"/>
<location filename="../src/ui/init-seafile-dialog.cpp" line="195"/>
<source>Initialization is not finished. Really quit?</source>
<translation>Ініціалізацію не закінчено. Дійсно вийти?</translation>
</message>
<message>
<location filename="../src/ui/init-seafile-dialog.cpp" line="164"/>
<source>The folder %1 does not exist</source>
<translation>Каталог %1 не існує</translation>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="190"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="191"/>
<source>Choose Seafile folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="192"/>
<source>logo</source>
<translation>лого</translation>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="193"/>
<source>Please choose a folder. We will create a Seafile subfolder in it. When you download a library, it will be saved there by default.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="194"/>
<source>Choose...</source>
<translation>Оберіть...</translation>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="195"/>
<source>Next</source>
<translation>Далі</translation>
</message>
<message>
<location filename="../ui_init-seafile-dialog.h" line="196"/>
<source>Cancel</source>
<translation>Скасувати</translation>
</message>
</context>
<context>
<name>InitVirtualDriveDialog</name>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="74"/>
<source>Checking your default library...</source>
<translation>Перевірка Вашої бібліотеки за замовчанням...</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="88"/>
<source>Creating the default library...</source>
<translation>Створення бібліотеки за замовчанням...</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="139"/>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="155"/>
<source>Failed to create default library:
The server version must be 2.1 or higher to support this.</source>
<translation>Не вдалося створити бібліотеку за замовчанням:
Необхідна версія сервера 2.1 або вище для підтримки цієї функції.</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="142"/>
<source>Failed to get default library:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="158"/>
<source>Failed to create default library:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="185"/>
<source>Downloading default library...</source>
<translation>Завантаження бібліотеки за замовчанням...</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="194"/>
<source>Failed to download default library:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="205"/>
<source>The default library has been downloaded.
You can click the "Open" button to view it.</source>
<translation>Вашу бібліотеку за замовчанням було завантажено.
Клікніть "Відкрити" щоб переглянути ії.</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="256"/>
<source>Error when downloading the default library: %1</source>
<translation>Помилка при завантаженні бібліотеки за замовчанням: %1</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="270"/>
<source>Creating the virtual disk...</source>
<translation>Створення віртуального диску...</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="179"/>
<source>Failed to download default library:
%1</source>
<translation>Не вдалося скачати бібліотеку за замовчанням:
%1</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="185"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="188"/>
<source>Seafile organizes files by libraries.
Do you like to download your default library and create a virtual disk?</source>
<translation>Seafile організує файли в бібліотеках.
Бажаєте завантажити вашу бібліотеку і створити віртуальний диск?</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="191"/>
<source>Skip</source>
<translation>Пропустити</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="192"/>
<source>Run in Background</source>
<translation>Виконати в фоновому режимі</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="193"/>
<source>Open</source>
<translation>Відкрити</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="194"/>
<source>Finish</source>
<translation>Готово</translation>
</message>
<message>
<location filename="../src/ui/init-vdrive-dialog.cpp" line="40"/>
<location filename="../ui_init-vdrive-dialog.h" line="186"/>
<source>Download Default Library</source>
<translation>Скачати бібліотеку за замовчанням</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="190"/>
<source>Yes</source>
<translation>Так</translation>
</message>
<message>
<location filename="../ui_init-vdrive-dialog.h" line="187"/>
<source>logo</source>
<translation>лого</translation>
</message>
</context>
<context>
<name>LoginDialog</name>
<message>
<location filename="../src/ui/login-dialog.cpp" line="68"/>
<location filename="../ui_login-dialog.h" line="261"/>
<source>Add an account</source>
<translation>Додати обліковий запис</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="116"/>
<location filename="../src/ui/login-dialog.cpp" line="117"/>
<source>Re-login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="132"/>
<source>Logging in...</source>
<translation>Вхід...</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="172"/>
<source>Network Error:
%1</source>
<translation>Помилка мережі:
%1</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="184"/>
<source><b>Warning:</b> The ssl certificate of this server is not trusted, proceed anyway?</source>
<translation><b>Увага:</b> SSL-сертифікат цього сервера не є надійним! Продовжувати?</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="198"/>
<source>Please enter the server address</source>
<translation>Будь ласка, введіть адресу сервера</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="202"/>
<location filename="../src/ui/login-dialog.cpp" line="208"/>
<location filename="../src/ui/login-dialog.cpp" line="310"/>
<location filename="../src/ui/login-dialog.cpp" line="316"/>
<source>%1 is not a valid server address</source>
<translation>%1 адреса сервера не припустима</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="215"/>
<source>Please enter the username</source>
<translation>Будь ласка, введіть ім'я користувача</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="226"/>
<source>Please enter the computer name</source>
<translation>Будь ласка, введіть ім'я комп'ютера</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="244"/>
<source>Failed to save current account</source>
<translation>Не вдалося зберігти поточний аккаунт</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="301"/>
<source>Seafile Server Address</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="220"/>
<source>Please enter the password</source>
<translation>Будь ласка, введіть пароль</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="271"/>
<source>Incorrect email or password</source>
<translation>Невірні email або пароль</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="273"/>
<source>Logging in too frequently, please wait a minute</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="275"/>
<source>Internal Server Error</source>
<translation>Внутрішня помилка на сервері</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="279"/>
<source>Failed to login: %1</source>
<translation>Не вдалося увійти в систему з логіном: %1</translation>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="281"/>
<source>Failed to login</source>
<translation>Не вдалося увійти в систему</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="260"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="262"/>
<source>logo</source>
<translation>лого</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="263"/>
<source>Server:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_login-dialog.h" line="264"/>
<source><html><head/><body><p>For example: https://seacloud.cc</p></body></html></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_login-dialog.h" line="265"/>
<source>or http://192.168.1.24:8000</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_login-dialog.h" line="266"/>
<source>Email:</source>
<translation>Email:</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="267"/>
<source>Password:</source>
<translation>Пароль:</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="270"/>
<source>status text</source>
<translation>текст статусу</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="268"/>
<source>Computer Name:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_login-dialog.h" line="269"/>
<source>e.g. Jim's laptop</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/login-dialog.cpp" line="107"/>
<location filename="../src/ui/login-dialog.cpp" line="300"/>
<location filename="../ui_login-dialog.h" line="271"/>
<source>Shibboleth Login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_login-dialog.h" line="272"/>
<source>Login</source>
<translation>Логін</translation>
</message>
<message>
<location filename="../ui_login-dialog.h" line="273"/>
<source>Cancel</source>
<translation>Відміна</translation>
</message>
</context>
<context>
<name>LogoutView</name>
<message>
<location filename="../src/ui/logout-view.cpp" line="55"/>
<source>You are logout. Please </source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/logout-view.cpp" line="55"/>
<source>login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/logout-view.cpp" line="59"/>
<source>Add an account</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>MainWindow</name>
<message>
<location filename="../src/ui/main-window.cpp" line="201"/>
<source>Refresh</source>
<translation>Оновити</translation>
</message>
</context>
<context>
<name>MessageListener</name>
<message>
<location filename="../src/message-listener.cpp" line="77"/>
<source>failed to load ccnet config dir </source>
<translation>не вдалося завантажити конфігураційний каталог ccnet </translation>
</message>
<message>
<location filename="../src/message-listener.cpp" line="152"/>
<source>"%1" is unsynced.
Reason: Deleted on server</source>
<translation>"%1" розсинхронізовано.
Причина: Видалено на сервері</translation>
</message>
<message>
<location filename="../src/message-listener.cpp" line="162"/>
<source>"%1" is synchronized</source>
<translation>"%1" синхронізовано</translation>
</message>
<message>
<location filename="../src/message-listener.cpp" line="175"/>
<source>"%1" failed to sync.
Access denied to service</source>
<translation>"%1" не вдалося синхронізувати.
Відмовлено в доступі до служби</translation>
</message>
<message>
<location filename="../src/message-listener.cpp" line="186"/>
<source>"%1" failed to sync.
The library owner's storage space is used up.</source>
<translation>"%1" не вдалося синхронізувати.
Власник бібліотеки вичерпав дозволене місце для зберігання.</translation>
</message>
</context>
<context>
<name>PostFileTask</name>
<message>
<location filename="../src/filebrowser/tasks.cpp" line="504"/>
<location filename="../src/filebrowser/tasks.cpp" line="509"/>
<source>File does not exist</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QObject</name>
<message>
<location filename="../src/rpc/local-repo.cpp" line="75"/>
<source>synchronized</source>
<translation>синхронізований</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="79"/>
<source>indexing files</source>
<translation>індексація файлів</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="83"/>
<location filename="../src/rpc/local-repo.cpp" line="115"/>
<source>sync initializing</source>
<translation> ініціалізація синхронізації</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="87"/>
<source>downloading</source>
<translation>завантаження</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="91"/>
<source>uploading</source>
<translation>віддача</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="95"/>
<source>sync merging</source>
<translation>синхронізувати злиття</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="99"/>
<source>waiting for sync</source>
<translation>очікування синхронізації</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="103"/>
<location filename="../src/rpc/local-repo.cpp" line="130"/>
<source>server not connected</source>
<translation>сервер не з'єднано</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="107"/>
<source>server authenticating</source>
<translation>аутентифікація на сервері</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="69"/>
<location filename="../src/rpc/local-repo.cpp" line="111"/>
<source>auto sync is turned off</source>
<translation>автоматичну синхронізацію вимкнено</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="120"/>
<source>unknown</source>
<translation>невідомий</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="133"/>
<source>Server has been removed</source>
<translation>Сервер був видалений</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="136"/>
<source>You have not logged in to the server</source>
<translation>Ви не ввійшли до серверу</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="139"/>
<source>You do not have permission to access this library</source>
<translation>У вас немає дозволу на доступ до цієї бібліотеки</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="142"/>
<source>The storage space of the library owner has been used up</source>
<translation>Власник бібліотеки вичерпав дозволене місце для зберігання</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="145"/>
<source>Remote service is not available</source>
<translation>Віддалений сервіс недоступний</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="148"/>
<location filename="../src/rpc/local-repo.cpp" line="190"/>
<source>Access denied to service</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="151"/>
<source>Internal data corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="154"/>
<source>Failed to start upload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="157"/>
<source>Error occured in upload</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="160"/>
<source>Failed to start download</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="163"/>
<source>Error occured in download</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="169"/>
<source>Library is damaged on server</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="172"/>
<source>Conflict in merge</source>
<translation>Конфлікт у злитті</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="175"/>
<source>Server version is too old</source>
<translation>Занадто стара версія сервера </translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="181"/>
<source>Unknown error</source>
<translation>Невідома помилка</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="184"/>
<source>The storage quota has been used up</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="187"/>
<source>Internal server error</source>
<translation>Внутрішня помилка сервера</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="193"/>
<source>Your %1 client is too old</source>
<translation>Ваш клієнт %1 занадто старий</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="196"/>
<location filename="../src/rpc/local-repo.cpp" line="199"/>
<location filename="../src/rpc/local-repo.cpp" line="202"/>
<location filename="../src/rpc/local-repo.cpp" line="205"/>
<location filename="../src/rpc/local-repo.cpp" line="208"/>
<location filename="../src/rpc/local-repo.cpp" line="210"/>
<location filename="../src/rpc/local-repo.cpp" line="212"/>
<location filename="../src/rpc/local-repo.cpp" line="214"/>
<location filename="../src/rpc/local-repo.cpp" line="216"/>
<location filename="../src/rpc/local-repo.cpp" line="218"/>
<location filename="../src/rpc/local-repo.cpp" line="220"/>
<location filename="../src/rpc/local-repo.cpp" line="222"/>
<location filename="../src/rpc/local-repo.cpp" line="224"/>
<location filename="../src/rpc/local-repo.cpp" line="226"/>
<location filename="../src/rpc/local-repo.cpp" line="228"/>
<location filename="../src/rpc/local-repo.cpp" line="230"/>
<location filename="../src/rpc/local-repo.cpp" line="232"/>
<location filename="../src/rpc/local-repo.cpp" line="234"/>
<location filename="../src/rpc/local-repo.cpp" line="236"/>
<location filename="../src/rpc/local-repo.cpp" line="238"/>
<location filename="../src/rpc/local-repo.cpp" line="240"/>
<location filename="../src/rpc/local-repo.cpp" line="242"/>
<source>Failed to sync this library</source>
<translation>Не вдалося синхронізувати цю бібліотеку</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="245"/>
<source>Files are locked by other application</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="166"/>
<source>Library is deleted on server</source>
<translation>Бібліотека видалена на сервері</translation>
</message>
<message>
<location filename="../src/rpc/local-repo.cpp" line="178"/>
<source>Error when accessing the local folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="71"/>
<source>initializing...</source>
<translation>Ініціалізація...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="74"/>
<source>connecting server...</source>
<translation>з'єднання з сервером...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="77"/>
<source>indexing files...</source>
<translation>індексація файлів...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="80"/>
<source>Downloading...</source>
<translation>Завантаження...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="86"/>
<source>Creating folder...</source>
<translation>Створення каталогу...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="92"/>
<source>Merge file changes...</source>
<translation>Об'єднання змін файлів...</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="95"/>
<source>Done</source>
<translation>Готово</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="98"/>
<source>Canceling</source>
<translation>Скасування</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="101"/>
<source>Canceled</source>
<translation>Скасовано</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="105"/>
<source>Failed to index local files.</source>
<translation>Не вдалося індексувати локальні файли.</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="108"/>
<source>Failed to create local files.</source>
<translation>Не вдалося створити локальні файли.</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="111"/>
<source>Failed to merge local file changes.</source>
<translation>Не вдалося об'єднати зміни локальних файлів.</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="114"/>
<source>Incorrect password. Please download again.</source>
<translation>Невірний пароль. Будь ласка, спробуйте ще.</translation>
</message>
<message>
<location filename="../src/rpc/clone-task.cpp" line="116"/>
<source>Internal error.</source>
<translation>Внутрішня помилка.</translation>
</message>
<message>
<location filename="../src/api/api-error.cpp" line="55"/>
<source>SSL Error</source>
<translation>SSL Помилка</translation>
</message>
<message>
<location filename="../src/api/api-error.cpp" line="57"/>
<source>Network Error: %1</source>
<translation>Помилка мережі: %1</translation>
</message>
<message>
<location filename="../src/api/api-error.cpp" line="59"/>
<source>Server Error</source>
<translation>Помилка серверу</translation>
</message>
<message>
<location filename="../src/certs-mgr.cpp" line="77"/>
<source>failed to open certs database</source>
<translation>не вдалося відкрити базу даних сертифікатів</translation>
</message>
<message>
<location filename="../src/repo-service-helper.cpp" line="22"/>
<location filename="../src/repo-service-helper.cpp" line="85"/>
<source>File "%1" doesn't exist in "%2"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/repo-service-helper.cpp" line="27"/>
<source>%1 couldn't find an application to open file %2</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/api/event.cpp" line="50"/>
<source>Created library "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/api/event.cpp" line="52"/>
<source>Deleted library "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/data-cache.cpp" line="89"/>
<source>failed to open file cache database</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/file-browser-dialog.cpp" line="954"/>
<source>Rename %1 to</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/repo-service-helper.cpp" line="124"/>
<source>Unable to download item "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="886"/>
<source>copy failed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="128"/>
<location filename="../src/ui/event-details-tree.cpp" line="136"/>
<location filename="../src/utils/translate-commit-desc.cpp" line="22"/>
<source>Added</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="130"/>
<location filename="../src/ui/event-details-tree.cpp" line="138"/>
<location filename="../src/utils/translate-commit-desc.cpp" line="24"/>
<source>Deleted</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="25"/>
<source>Removed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="132"/>
<location filename="../src/utils/translate-commit-desc.cpp" line="26"/>
<source>Modified</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/event-details-tree.cpp" line="134"/>
<location filename="../src/utils/translate-commit-desc.cpp" line="27"/>
<source>Renamed</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="23"/>
<source>Added or modified</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="28"/>
<source>Moved</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="29"/>
<source>Added directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="30"/>
<source>Removed directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="31"/>
<source>Renamed directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="32"/>
<source>Moved directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="60"/>
<source>files</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="62"/>
<source>directories</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="65"/>
<source>and %1 more</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="86"/>
<source>Reverted library to status at</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="93"/>
<source>Reverted file "%1" to status at %2.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="97"/>
<source>Recovered deleted directory</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="99"/>
<source>Changed library name or description</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/translate-commit-desc.cpp" line="101"/>
<source>Auto merge by seafile system</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="559"/>
<location filename="../src/utils/utils.cpp" line="587"/>
<location filename="../src/utils/utils.cpp" line="590"/>
<source>Just now</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="575"/>
<source>1 day ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="575"/>
<source>%1 days ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="579"/>
<source>1 hour ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="579"/>
<source>%1 hours ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="583"/>
<source>1 minute ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="583"/>
<source>%1 minutes ago</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/utils.cpp" line="728"/>
<source><Not Part of Certificate></source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/main.cpp" line="182"/>
<source>Found another running process of %1, kill it and start a new one?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/main.cpp" line="202"/>
<source>Unable to start %1 due to the failure of shutting down the previous process</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="63"/>
<source>Sync this library to:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/download-repo-dialog.cpp" line="65"/>
<source>Sync this folder to:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="606"/>
<source>Folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="606"/>
<source>Readonly Folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="614"/>
<location filename="../src/utils/file-utils.cpp" line="646"/>
<source>Document</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="618"/>
<source>PDF Document</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="620"/>
<source>Image File</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="622"/>
<source>Text Document</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="624"/>
<source>Audio File</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="626"/>
<source>Video File</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="628"/>
<location filename="../src/utils/file-utils.cpp" line="636"/>
<source>Word Document</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="630"/>
<location filename="../src/utils/file-utils.cpp" line="640"/>
<source>PowerPoint Document</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/utils/file-utils.cpp" line="632"/>
<location filename="../src/utils/file-utils.cpp" line="638"/>
<source>Excel Document</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RepoDetailDialog</name>
<message>
<location filename="../src/ui/repo-detail-dialog.cpp" line="36"/>
<source>Library "%1"</source>
<translation>Бібліотека "%1"</translation>
</message>
<message>
<location filename="../src/ui/repo-detail-dialog.cpp" line="56"/>
<location filename="../src/ui/repo-detail-dialog.cpp" line="118"/>
<source>This library is not downloaded yet</source>
<translation>Ця бібліотека ще не завантажена</translation>
</message>
<message>
<location filename="../src/ui/repo-detail-dialog.cpp" line="83"/>
<source>Error: </source>
<translation>Помилка:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="215"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="216"/>
<source>RepoIcon</source>
<translation>Іконка репозиторію</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="217"/>
<source>RepoName</source>
<translation>Ім'я репозиторію</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="218"/>
<source>Description:</source>
<translation>Опис:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="219"/>
<location filename="../ui_repo-detail-dialog.h" line="221"/>
<location filename="../ui_repo-detail-dialog.h" line="225"/>
<location filename="../ui_repo-detail-dialog.h" line="227"/>
<source>TextLabel</source>
<translation>Текстова мітка</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="220"/>
<source>Owner:</source>
<translation>Власник:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="222"/>
<source>Last Modified:</source>
<translation>Остання зміна:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="223"/>
<source>mtime</source>
<translation>час</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="224"/>
<source>Size:</source>
<translation>Розмір:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="226"/>
<source>Local Path:</source>
<translation>Локальний шлях:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="228"/>
<source>Status:</source>
<translation>Статус:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="229"/>
<source>RepoStatus</source>
<translation>Статус</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="230"/>
<source>Name:</source>
<translation>Назва:</translation>
</message>
<message>
<location filename="../ui_repo-detail-dialog.h" line="231"/>
<source>Close</source>
<translation>Закрити</translation>
</message>
</context>
<context>
<name>RepoItemDelegate</name>
<message>
<location filename="../src/ui/repo-item-delegate.cpp" line="254"/>
<source>, %1%2</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-item-delegate.cpp" line="259"/>
<source>, %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-item-delegate.cpp" line="454"/>
<source>This library has not been downloaded</source>
<translation>Цю бібліотеку не було завантажено</translation>
</message>
</context>
<context>
<name>RepoService</name>
<message>
<location filename="../src/repo-service.cpp" line="333"/>
<location filename="../src/repo-service.cpp" line="368"/>
<source>Unable to open file "%1" from nonexistent library "%2"</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RepoTreeModel</name>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="60"/>
<source>Recently Updated</source>
<translation>Нещодавно оновлено</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="61"/>
<source>My Libraries</source>
<translation>Мої бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="62"/>
<source>Sub Libraries</source>
<translation>Суб-бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="63"/>
<source>Private Shares</source>
<translation>Спільний доступ</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="64"/>
<source>Synced Libraries</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-model.cpp" line="244"/>
<source>Organization</source>
<translation>Організація</translation>
</message>
</context>
<context>
<name>RepoTreeView</name>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="283"/>
<location filename="../src/ui/repo-tree-view.cpp" line="284"/>
<source>Disable auto sync</source>
<translation>Вимкнути автосинхронізацію</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="287"/>
<location filename="../src/ui/repo-tree-view.cpp" line="288"/>
<location filename="../src/ui/repo-tree-view.cpp" line="389"/>
<location filename="../src/ui/repo-tree-view.cpp" line="390"/>
<source>Enable auto sync</source>
<translation>Увімкнути автосинхронізацію</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="342"/>
<source>Show &details</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="344"/>
<source>Show details of this library</source>
<translation>Показати деталі цієї бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="348"/>
<location filename="../src/ui/repo-tree-view.cpp" line="354"/>
<source>&Sync this library</source>
<translation>&Синхронізація цієї бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="350"/>
<location filename="../src/ui/repo-tree-view.cpp" line="356"/>
<source>Sync this library</source>
<translation>Синхронізація цієї бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="162"/>
<source>Recently Updated</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="360"/>
<source>Sync &now</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="362"/>
<source>Sync this library immediately</source>
<translation>Синхронізувати цю бібліотеку негайно</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="365"/>
<source>&Cancel download</source>
<translation>&Відмінити завантаження</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="367"/>
<source>Cancel download of this library</source>
<translation>Відмінити завантаження цієї бібліотеки</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="371"/>
<location filename="../src/ui/repo-tree-view.cpp" line="377"/>
<source>&Open folder</source>
<translation>&Відкрити каталог</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="373"/>
<location filename="../src/ui/repo-tree-view.cpp" line="379"/>
<source>open local folder</source>
<translation>відкрити локальний каталог</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="383"/>
<source>&Unsync</source>
<translation>&Розсинхронізація</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="384"/>
<source>unsync this library</source>
<translation>розсинхронізувати цю бібліотеку</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="394"/>
<source>&View on cloud</source>
<translation>&Перегляд у хмарі</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="396"/>
<source>view this library on seahub</source>
<translation>переглянути цю бібліотеку на вебсайті</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="401"/>
<source>&Open cloud file browser</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="403"/>
<source>open this library in embedded Cloud File Browser</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="408"/>
<source>&Resync this library</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="410"/>
<source>unsync and resync this library</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="449"/>
<source>Are you sure to unsync library "%1"?</source>
<translation>Ви дійсно хочете розсинхронізувати бібліотеку "%1"?</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="461"/>
<location filename="../src/ui/repo-tree-view.cpp" line="722"/>
<source>Failed to unsync library "%1"</source>
<translation>Не вдалося розсинхронізувати бібліотеку "%1"</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="658"/>
<source>Failed to cancel this task:
%1</source>
<translation>Не вдалося скасувати це завдання:
%1</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="662"/>
<source>The download has been canceled</source>
<translation>Завантаження було скасоване</translation>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="711"/>
<source>Are you sure to unsync and resync library "%1"?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="749"/>
<source>Failed to add download task:
%1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="781"/>
<source>Unable to overwrite file "%1" with itself</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="786"/>
<source>Are you sure to overwrite file "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="789"/>
<source>Unable to delete file "%1"</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repo-tree-view.cpp" line="879"/>
<source>Failed to upload file: %1</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ReposTab</name>
<message>
<location filename="../src/ui/repos-tab.cpp" line="52"/>
<source>Search libraries...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repos-tab.cpp" line="109"/>
<source>retry</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/repos-tab.cpp" line="110"/>
<source>Failed to get libraries information<br/>Please %1</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SeafileApiClient</name>
<message>
<location filename="../src/api/api-client.cpp" line="138"/>
<source><b>Warning:</b> The ssl certificate of this server is not trusted, proceed anyway?</source>
<translation><b> Увага: </ B> SSL-сертифікат цього сервера не є надійним, в будь-якому випадку продовжити?</translation>
</message>
</context>
<context>
<name>SeafileApplet</name>
<message>
<location filename="../src/seafile-applet.cpp" line="356"/>
<source>failed to add default account</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/seafile-applet.cpp" line="470"/>
<source>Failed to initialize log: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/seafile-applet.cpp" line="593"/>
<source>A new version of %1 client (%2) is available.
Do you want to visit the download page?</source>
<translation>Нова версія %1 клієнту (%2) доступна.
Бажаєте відвідати сторінку завантажень?</translation>
</message>
</context>
<context>
<name>SeafileLinkDialog</name>
<message>
<location filename="../src/filebrowser/seafilelink-dialog.cpp" line="17"/>
<source>Seafile Internal Link</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/seafilelink-dialog.cpp" line="24"/>
<source>Copy to clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/seafilelink-dialog.cpp" line="30"/>
<source>Seafile Web Link:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/seafilelink-dialog.cpp" line="52"/>
<source>Seafile Protocol Link:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/seafilelink-dialog.cpp" line="85"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SeafileRpcClient</name>
<message>
<location filename="../src/rpc/rpc-client.cpp" line="44"/>
<source>failed to load ccnet config dir %1</source>
<translation>не вдалося завантажити конфігураційний каталог ccnet %1</translation>
</message>
<message>
<location filename="../src/rpc/rpc-client.cpp" line="490"/>
<location filename="../src/rpc/rpc-client.cpp" line="510"/>
<location filename="../src/rpc/rpc-client.cpp" line="580"/>
<location filename="../src/rpc/rpc-client.cpp" line="745"/>
<location filename="../src/rpc/rpc-client.cpp" line="810"/>
<source>Unknown error</source>
<translation>Невідома помилка</translation>
</message>
<message>
<location filename="../src/rpc/rpc-client.cpp" line="707"/>
<source>The path "%1" conflicts with system path</source>
<translation>Шлях "%1" конфліктує з системним шляхом</translation>
</message>
<message>
<location filename="../src/rpc/rpc-client.cpp" line="709"/>
<source>The path "%1" conflicts with an existing library</source>
<translation>Шлях "%1" конфліктує з інсуючою бібліотекою</translation>
</message>
</context>
<context>
<name>SeafileTrayIcon</name>
<message>
<location filename="../src/ui/tray-icon.cpp" line="106"/>
<source>Disable auto sync</source>
<translation>Вимкнути автосинхронізацію</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="109"/>
<source>Enable auto sync</source>
<translation>Увімкнути автосинхронізацію</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="112"/>
<source>View unread notifications</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="116"/>
<source>&Quit</source>
<translation>&Вихід</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="119"/>
<source>Show main window</source>
<translation>Показати головне вікно</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="122"/>
<source>Settings</source>
<translation>Налаштування</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="125"/>
<source>Open %1 &folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="126"/>
<source>open %1 folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="129"/>
<source>Open &logs folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="133"/>
<source>&About</source>
<translation>&Інфо</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="134"/>
<source>Show the application's About box</source>
<translation>Показати інформацію про програму</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="137"/>
<source>&Online help</source>
<translation>&Онлайн допомога</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="144"/>
<source>Help</source>
<translation>Допомога</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="184"/>
<source>File</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location filename="../src/ui/tray-icon.cpp" line="492"/>
<source>You have %n message(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="498"/>
<source>auto sync is disabled</source>
<translation>автосинхронізацію вимкнено</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="525"/>
<source>Uploading</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="526"/>
<source>Downloading</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="413"/>
<source>About %1</source>
<translation>Біля %1</translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="130"/>
<source>open %1 log folder</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="138"/>
<source>open %1 online help</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="414"/>
<source><h2>%1 Client %2</h2></source>
<translation><h2>%1 Клієнт %2</h2></translation>
</message>
<message>
<location filename="../src/ui/tray-icon.cpp" line="503"/>
<source>some servers not connected</source>
<translation>деякі сервери не підключені</translation>
</message>
</context>
<context>
<name>SearchTab</name>
<message>
<location filename="../src/ui/search-tab.cpp" line="80"/><|fim▁hole|> <translation type="unfinished"/>
</message>
</context>
<context>
<name>ServerStatusDialog</name>
<message>
<location filename="../src/ui/server-status-dialog.cpp" line="13"/>
<source>Servers connection status</source>
<translation>Статус з'єднання</translation>
</message>
<message>
<location filename="../src/ui/server-status-dialog.cpp" line="32"/>
<source>connected</source>
<translation>з'єднано</translation>
</message>
<message>
<location filename="../src/ui/server-status-dialog.cpp" line="35"/>
<source>disconnected</source>
<translation>від'єднано</translation>
</message>
<message>
<location filename="../ui_server-status-dialog.h" line="70"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_server-status-dialog.h" line="71"/>
<source>Close</source>
<translation>Закрити</translation>
</message>
</context>
<context>
<name>SetRepoPasswordDialog</name>
<message>
<location filename="../src/ui/set-repo-password-dialog.cpp" line="14"/>
<source>Please provide the library password</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/set-repo-password-dialog.cpp" line="21"/>
<source>Provide the password for library %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/set-repo-password-dialog.cpp" line="35"/>
<source>Please enter the password</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/set-repo-password-dialog.cpp" line="61"/>
<source>Incorrect password</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/set-repo-password-dialog.cpp" line="63"/>
<source>Unknown error</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_set-repo-password-dialog.h" line="115"/>
<source>Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_set-repo-password-dialog.h" line="116"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_set-repo-password-dialog.h" line="117"/>
<source>Cancel</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SettingsDialog</name>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="29"/>
<source>Settings</source>
<translation>Налаштування</translation>
</message>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="44"/>
<source>None</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="45"/>
<source>HTTP Proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="46"/>
<source>Socks5 Proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="96"/>
<source>You have changed languange. Restart to apply it?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/settings-dialog.cpp" line="99"/>
<source>You have changed proxy settings. Restart to apply it?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="404"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="405"/>
<source>Hide main window when started</source>
<translation>Ховати головне вікно при старті</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="406"/>
<source>Notify when libraries are synchronized</source>
<translation>Повідомляти про синхронізацію бібліотек</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="407"/>
<source>Enable sync temporary files of MSOffice/Libreoffice</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="408"/>
<source>Auto start Seafile after login</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="412"/>
<source>Check for new version on startup</source>
<translation>Перевірка нових версій при старті</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="413"/>
<source>Download speed limit (KB/s):</source>
<translation>Обмеження швидкості скачування (KB/s):</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="414"/>
<source>Upload speed limit (KB/s):</source>
<translation>Обмеження швидкості завантаження (KB/s):</translation>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="415"/>
<source>Basic</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="416"/>
<source>Do not automatically unsync a library</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="417"/>
<source>Do not automatically unsync a library when its local directory is removed or unaccessible for other reasons.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="418"/>
<source>Do not unsync a library when not found on server</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="419"/>
<source>Do not automatically unsync a library when it's not found on server</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="409"/>
<source>Hide Seafile Icon from the dock</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="410"/>
<source>Enable FinderSync Extension</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="411"/>
<source>Enable Explorer Extension</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="420"/>
<source>Do not verify server certificate in HTTPS syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="421"/>
<source>Enable syncing with an existing folder with a different name</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="422"/>
<source>Advanced</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="423"/>
<source>Language (need restart)</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="424"/>
<source>Language</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="425"/>
<source>Proxy Type:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="426"/>
<source>Host:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="427"/>
<source>Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="428"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="429"/>
<source>Username:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="430"/>
<source>Password:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="431"/>
<source>Proxy server requires a password</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="432"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="433"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_settings-dialog.h" line="434"/>
<source>Cancel</source>
<translation>Відміна</translation>
</message>
</context>
<context>
<name>SharedLinkDialog</name>
<message>
<location filename="../src/filebrowser/sharedlink-dialog.cpp" line="14"/>
<source>Share Link</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/sharedlink-dialog.cpp" line="18"/>
<source>Share link:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/sharedlink-dialog.cpp" line="31"/>
<source>Direct Download</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/sharedlink-dialog.cpp" line="44"/>
<source>Copy to clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/filebrowser/sharedlink-dialog.cpp" line="48"/>
<source>OK</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ShibLoginDialog</name>
<message>
<location filename="../src/shib/shib-login-dialog.cpp" line="30"/>
<source>Login with Shibboleth</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/shib/shib-login-dialog.cpp" line="85"/>
<source>Failed to save current account</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SslConfirmDialog</name>
<message>
<location filename="../src/ui/ssl-confirm-dialog.cpp" line="15"/>
<source>Untrusted Connection</source>
<translation>Ненадійне з'єднання</translation>
</message>
<message>
<location filename="../src/ui/ssl-confirm-dialog.cpp" line="18"/>
<source>%1 uses an invalid security certificate. The connection may be insecure. Do you want to continue?</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/ssl-confirm-dialog.cpp" line="24"/>
<source>Current RSA key fingerprint is %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/ssl-confirm-dialog.cpp" line="27"/>
<source>Previous RSA key fingerprint is %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../ui_ssl-confirm-dialog.h" line="96"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_ssl-confirm-dialog.h" line="97"/>
<source>Remember my choice</source>
<translation>Запам'ятати мій вибір</translation>
</message>
<message>
<location filename="../ui_ssl-confirm-dialog.h" line="98"/>
<source>Yes</source>
<translation>Так</translation>
</message>
<message>
<location filename="../ui_ssl-confirm-dialog.h" line="99"/>
<source>No</source>
<translation>Ні</translation>
</message>
</context>
<context>
<name>StarredFilesListView</name>
<message>
<location filename="../src/ui/starred-files-list-view.cpp" line="40"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/starred-files-list-view.cpp" line="43"/>
<source>Open this file</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/starred-files-list-view.cpp" line="46"/>
<source>view on &Web</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/starred-files-list-view.cpp" line="49"/>
<source>view this file on website</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>StarredFilesTab</name>
<message>
<location filename="../src/ui/starred-files-tab.cpp" line="93"/>
<source>retry</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/starred-files-tab.cpp" line="94"/>
<source>Failed to get starred files information<br/>Please %1</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../src/ui/starred-files-tab.cpp" line="115"/>
<source>You have no starred files yet.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>UninstallHelperDialog</name>
<message>
<location filename="../src/ui/uninstall-helper-dialog.cpp" line="21"/>
<source>Uninstall %1</source>
<translation>Деінсталювати %1</translation>
</message>
<message>
<location filename="../src/ui/uninstall-helper-dialog.cpp" line="24"/>
<source>Do you want to remove the %1 account information?</source>
<translation>Дійсно хочете видалити інформацю облікового запису %1 ?</translation>
</message>
<message>
<location filename="../src/ui/uninstall-helper-dialog.cpp" line="49"/>
<source>Removing account information...</source>
<translation>Видалення інформації облікового запису...</translation>
</message>
<message>
<location filename="../ui_uninstall-helper-dialog.h" line="106"/>
<source>Dialog</source>
<translation>Діалог</translation>
</message>
<message>
<location filename="../ui_uninstall-helper-dialog.h" line="107"/>
<source>text</source>
<translation>текст</translation>
</message>
<message>
<location filename="../ui_uninstall-helper-dialog.h" line="108"/>
<source>Yes</source>
<translation>Так</translation>
</message>
<message>
<location filename="../ui_uninstall-helper-dialog.h" line="109"/>
<source>No</source>
<translation>Ні</translation>
</message>
</context>
</TS><|fim▁end|> | <source>Search Files...</source> |
<|file_name|>s_4732.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | search_result['4732']=["topic_0000000000000B84_events--.html","ConfigWrapper Events",""]; |
<|file_name|>_function_registry.py<|end_file_name|><|fim▁begin|># Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import importlib.util
import os
import sys
import types
from functions_framework.exceptions import (
InvalidConfigurationException,
InvalidTargetTypeException,
MissingTargetException,
)
DEFAULT_SOURCE = os.path.realpath("./main.py")
FUNCTION_SIGNATURE_TYPE = "FUNCTION_SIGNATURE_TYPE"
HTTP_SIGNATURE_TYPE = "http"
CLOUDEVENT_SIGNATURE_TYPE = "cloudevent"
BACKGROUNDEVENT_SIGNATURE_TYPE = "event"
# REGISTRY_MAP stores the registered functions.
# Keys are user function names, values are user function signature types.
REGISTRY_MAP = {}
def get_user_function(source, source_module, target):
"""Returns user function, raises exception for invalid function."""
# Extract the target function from the source file
if not hasattr(source_module, target):
raise MissingTargetException(
"File {source} is expected to contain a function named {target}".format(
source=source, target=target
)
)
function = getattr(source_module, target)
# Check that it is a function
if not isinstance(function, types.FunctionType):
raise InvalidTargetTypeException(
"The function defined in file {source} as {target} needs to be of "
"type function. Got: invalid type {target_type}".format(
source=source, target=target, target_type=type(function)
)
)
return function
def load_function_module(source):
"""Load user function source file."""
# 1. Extract the module name from the source path
realpath = os.path.realpath(source)
directory, filename = os.path.split(realpath)
name, extension = os.path.splitext(filename)
# 2. Create a new module
spec = importlib.util.spec_from_file_location(
name, realpath, submodule_search_locations=[directory]
)
source_module = importlib.util.module_from_spec(spec)
# 3. Add the directory of the source to sys.path to allow the function to
# load modules relative to its location
sys.path.append(directory)
# 4. Add the module to sys.modules
sys.modules[name] = source_module
return source_module, spec
<|fim▁hole|>def get_function_source(source):
"""Get the configured function source."""
source = source or os.environ.get("FUNCTION_SOURCE", DEFAULT_SOURCE)
# Python 3.5: os.path.exist does not support PosixPath
source = str(source)
return source
def get_function_target(target):
"""Get the configured function target."""
target = target or os.environ.get("FUNCTION_TARGET", "")
# Set the environment variable if it wasn't already
os.environ["FUNCTION_TARGET"] = target
if not target:
raise InvalidConfigurationException(
"Target is not specified (FUNCTION_TARGET environment variable not set)"
)
return target
def get_func_signature_type(func_name: str, signature_type: str) -> str:
"""Get user function's signature type.
Signature type is searched in the following order:
1. Decorator user used to register their function
2. --signature-type flag
3. environment variable FUNCTION_SIGNATURE_TYPE
If none of the above is set, signature type defaults to be "http".
"""
registered_type = REGISTRY_MAP[func_name] if func_name in REGISTRY_MAP else ""
sig_type = (
registered_type
or signature_type
or os.environ.get(FUNCTION_SIGNATURE_TYPE, HTTP_SIGNATURE_TYPE)
)
# Set the environment variable if it wasn't already
os.environ[FUNCTION_SIGNATURE_TYPE] = sig_type
# Update signature type for legacy GCF Python 3.7
if os.environ.get("ENTRY_POINT"):
os.environ["FUNCTION_TRIGGER_TYPE"] = sig_type
return sig_type<|fim▁end|> | |
<|file_name|>powers_of_two.py<|end_file_name|><|fim▁begin|>def powers_of_two(limit):
value = 1
while value < limit:
yield value
value += value
# Use the generator
for i in powers_of_two(70):
print(i)
# Explore the mechanism
g = powers_of_two(100)<|fim▁hole|>assert next(g) == 4
assert next(g) == 8<|fim▁end|> | assert str(type(powers_of_two)) == "<class 'function'>"
assert str(type(g)) == "<class 'generator'>"
assert g.__next__() == 1
assert g.__next__() == 2 |
<|file_name|>pre_commit_linter.py<|end_file_name|><|fim▁begin|># coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pre-commit script for Oppia.
This script lints Python and JavaScript code, and prints a
list of lint errors to the terminal. If the directory path is passed,
it will lint all Python and JavaScript files in that directory; otherwise,
it will only lint files that have been touched in this commit.
This script ignores all filepaths contained within the excludeFiles
argument in .jscsrc. Note that, as a side-effect, these filepaths will also
prevent Python files in those paths from being linted.
IMPORTANT NOTES:
1. Before running this script, you must install third-party dependencies by
running
bash scripts/start.sh
at least once.
=====================
CUSTOMIZATION OPTIONS
=====================
1. To lint only files that have been touched in this commit
python scripts/pre_commit_linter.py
2. To lint all files in the folder or to lint just a specific file
python scripts/pre_commit_linter.py --path filepath
3. To lint a specific list of files (*.js/*.py only). Separate files by spaces
python scripts/pre_commit_linter.py --files file_1 file_2 ... file_n
Note that the root folder MUST be named 'oppia'.
"""
# Pylint has issues with the import order of argparse.
# pylint: disable=wrong-import-order
import argparse
import fnmatch
import multiprocessing
import os
import json
import subprocess
import sys
import time
# pylint: enable=wrong-import-order
_PARSER = argparse.ArgumentParser()
_EXCLUSIVE_GROUP = _PARSER.add_mutually_exclusive_group()
_EXCLUSIVE_GROUP.add_argument(
'--path',
help='path to the directory with files to be linted',
action='store')
_EXCLUSIVE_GROUP.add_argument(
'--files',
nargs='+',
help='specific files to be linted. Space separated list',
action='store')
BAD_PATTERNS = {
'__author__': {
'message': 'Please remove author tags from this file.',
'excluded_files': ()},
'datetime.datetime.now()': {
'message': 'Please use datetime.datetime.utcnow() instead of'
'datetime.datetime.now().',
'excluded_files': ()},
'\t': {
'message': 'Please use spaces instead of tabs.',
'excluded_files': ()},
'\r': {
'message': 'Please make sure all files only have LF endings (no CRLF).',
'excluded_files': ()},
'glyphicon': {
'message': 'Please use equivalent material-icons '
'instead of glyphicons.',
'excluded_files': ()}
}
BAD_PATTERNS_JS = {
' == ': {
'message': 'Please replace == with === in this file.',
'excluded_files': (
'core/templates/dev/head/expressions/parserSpec.js',
'core/templates/dev/head/expressions/evaluatorSpec.js',
'core/templates/dev/head/expressions/typeParserSpec.js')},
' != ': {
'message': 'Please replace != with !== in this file.',
'excluded_files': (
'core/templates/dev/head/expressions/parserSpec.js',
'core/templates/dev/head/expressions/evaluatorSpec.js',
'core/templates/dev/head/expressions/typeParserSpec.js')}
}
EXCLUDED_PATHS = (
'third_party/*', '.git/*', '*.pyc', 'CHANGELOG',
'scripts/pre_commit_linter.py', 'integrations/*',
'integrations_dev/*', '*.svg', '*.png', '*.zip', '*.ico', '*.jpg')
if not os.getcwd().endswith('oppia'):
print ''
print 'ERROR Please run this script from the oppia root directory.'
_PARENT_DIR = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
_PYLINT_PATH = os.path.join(_PARENT_DIR, 'oppia_tools', 'pylint-1.5.2')
if not os.path.exists(_PYLINT_PATH):
print ''
print 'ERROR Please run start.sh first to install pylint '<|fim▁hole|> print ' and its dependencies.'
sys.exit(1)
_PATHS_TO_INSERT = [
_PYLINT_PATH,
os.getcwd(),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'webapp2-2.3'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'yaml-3.10'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine', 'lib', 'jinja2-2.6'),
os.path.join(
_PARENT_DIR, 'oppia_tools', 'google_appengine_1.9.19',
'google_appengine'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'webtest-1.4.2'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'numpy-1.6.1'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'browsermob-proxy-0.7.1'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'selenium-2.53.2'),
os.path.join(_PARENT_DIR, 'oppia_tools', 'xvfbwrapper-0.2.8'),
os.path.join('third_party', 'gae-pipeline-1.9.17.0'),
os.path.join('third_party', 'bleach-1.2.2'),
os.path.join('third_party', 'gae-mapreduce-1.9.17.0'),
]
for path in _PATHS_TO_INSERT:
sys.path.insert(0, path)
from pylint import lint # pylint: disable=wrong-import-position
_MESSAGE_TYPE_SUCCESS = 'SUCCESS'
_MESSAGE_TYPE_FAILED = 'FAILED'
def _get_changed_filenames():
"""Returns a list of modified files (both staged and unstaged)
Returns:
a list of filenames of modified files
"""
unstaged_files = subprocess.check_output([
'git', 'diff', '--name-only']).splitlines()
staged_files = subprocess.check_output([
'git', 'diff', '--cached', '--name-only',
'--diff-filter=ACM']).splitlines()
return unstaged_files + staged_files
def _get_glob_patterns_excluded_from_jscsrc(config_jscsrc):
"""Collects excludeFiles from jscsrc file.
Args:
- config_jscsrc: str. Path to .jscsrc file.
Returns:
a list of files in excludeFiles.
"""
with open(config_jscsrc) as f:
f.readline() # First three lines are comments
f.readline()
f.readline()
json_data = json.loads(f.read())
return json_data['excludeFiles']
def _get_all_files_in_directory(dir_path, excluded_glob_patterns):
"""Recursively collects all files in directory and
subdirectories of specified path.
Args:
- dir_path: str. Path to the folder to be linted.
- excluded_glob_patterns: set. Set of all files to be excluded.
Returns:
a list of files in directory and subdirectories without excluded files.
"""
files_in_directory = []
for _dir, _, files in os.walk(dir_path):
for file_name in files:
filename = os.path.relpath(
os.path.join(_dir, file_name), os.getcwd())
if not any([fnmatch.fnmatch(filename, gp) for gp in
excluded_glob_patterns]):
files_in_directory.append(filename)
return files_in_directory
def _lint_js_files(node_path, jscs_path, config_jscsrc, files_to_lint, stdout,
result):
"""Prints a list of lint errors in the given list of JavaScript files.
Args:
- node_path: str. Path to the node binary.
- jscs_path: str. Path to the JSCS binary.
- config_jscsrc: str. Configuration args for the call to the JSCS binary.
- files_to_lint: list of str. A list of filepaths to lint.
- stdout: multiprocessing.Queue. A queue to store JSCS outputs
- result: multiprocessing.Queue. A queue to put results of test
Returns:
None
"""
start_time = time.time()
num_files_with_errors = 0
num_js_files = len(files_to_lint)
if not files_to_lint:
result.put('')
print 'There are no JavaScript files to lint.'
return
jscs_cmd_args = [node_path, jscs_path, config_jscsrc]
for _, filename in enumerate(files_to_lint):
proc_args = jscs_cmd_args + [filename]
proc = subprocess.Popen(
proc_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
linter_stdout, linter_stderr = proc.communicate()
if linter_stderr:
print 'LINTER FAILED'
print linter_stderr
sys.exit(1)
if linter_stdout:
num_files_with_errors += 1
stdout.put(linter_stdout)
if num_files_with_errors:
result.put('%s %s JavaScript files' % (
_MESSAGE_TYPE_FAILED, num_files_with_errors))
else:
result.put('%s %s JavaScript files linted (%.1f secs)' % (
_MESSAGE_TYPE_SUCCESS, num_js_files, time.time() - start_time))
def _lint_py_files(config_pylint, files_to_lint, result):
"""Prints a list of lint errors in the given list of Python files.
Args:
- config_pylint: str. Path to the .pylintrc file.
- files_to_lint: list of str. A list of filepaths to lint.
- result: multiprocessing.Queue. A queue to put results of test
Returns:
None
"""
start_time = time.time()
are_there_errors = False
num_py_files = len(files_to_lint)
if not files_to_lint:
result.put('')
print 'There are no Python files to lint.'
return
try:
# This prints output to the console.
lint.Run(files_to_lint + [config_pylint])
except SystemExit as e:
if str(e) != '0':
are_there_errors = True
if are_there_errors:
result.put('%s Python linting failed' % _MESSAGE_TYPE_FAILED)
else:
result.put('%s %s Python files linted (%.1f secs)' % (
_MESSAGE_TYPE_SUCCESS, num_py_files, time.time() - start_time))
def _get_all_files():
"""This function is used to check if this script is ran from
root directory and to return a list of all the files for linting and
pattern checks.
"""
jscsrc_path = os.path.join(os.getcwd(), '.jscsrc')
parsed_args = _PARSER.parse_args()
if parsed_args.path:
input_path = os.path.join(os.getcwd(), parsed_args.path)
if not os.path.exists(input_path):
print 'Could not locate file or directory %s. Exiting.' % input_path
print '----------------------------------------'
sys.exit(1)
if os.path.isfile(input_path):
all_files = [input_path]
else:
excluded_glob_patterns = _get_glob_patterns_excluded_from_jscsrc(
jscsrc_path)
all_files = _get_all_files_in_directory(
input_path, excluded_glob_patterns)
elif parsed_args.files:
valid_filepaths = []
invalid_filepaths = []
for f in parsed_args.files:
if os.path.isfile(f):
valid_filepaths.append(f)
else:
invalid_filepaths.append(f)
if invalid_filepaths:
print ('The following file(s) do not exist: %s\n'
'Exiting.' % invalid_filepaths)
sys.exit(1)
all_files = valid_filepaths
else:
all_files = _get_changed_filenames()
return all_files
def _pre_commit_linter(all_files):
"""This function is used to check if node-jscs dependencies are installed
and pass JSCS binary path
"""
jscsrc_path = os.path.join(os.getcwd(), '.jscsrc')
pylintrc_path = os.path.join(os.getcwd(), '.pylintrc')
config_jscsrc = '--config=%s' % jscsrc_path
config_pylint = '--rcfile=%s' % pylintrc_path
parent_dir = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
node_path = os.path.join(
parent_dir, 'oppia_tools', 'node-4.2.1', 'bin', 'node')
jscs_path = os.path.join(
parent_dir, 'node_modules', 'jscs', 'bin', 'jscs')
if not os.path.exists(jscs_path):
print ''
print 'ERROR Please run start.sh first to install node-jscs '
print ' and its dependencies.'
sys.exit(1)
js_files_to_lint = [
filename for filename in all_files if filename.endswith('.js')]
py_files_to_lint = [
filename for filename in all_files if filename.endswith('.py')]
js_result = multiprocessing.Queue()
linting_processes = []
js_stdout = multiprocessing.Queue()
linting_processes.append(multiprocessing.Process(
target=_lint_js_files, args=(node_path, jscs_path, config_jscsrc,
js_files_to_lint, js_stdout, js_result)))
py_result = multiprocessing.Queue()
linting_processes.append(multiprocessing.Process(
target=_lint_py_files,
args=(config_pylint, py_files_to_lint, py_result)))
print 'Starting Javascript and Python Linting'
print '----------------------------------------'
for process in linting_processes:
process.start()
for process in linting_processes:
process.join()
js_messages = []
while not js_stdout.empty():
js_messages.append(js_stdout.get())
print ''
print '\n'.join(js_messages)
print '----------------------------------------'
summary_messages = []
summary_messages.append(js_result.get())
summary_messages.append(py_result.get())
print '\n'.join(summary_messages)
print ''
return summary_messages
def _check_bad_patterns(all_files):
"""This function is used for detecting bad patterns.
"""
print 'Starting Pattern Checks'
print '----------------------------------------'
total_files_checked = 0
total_error_count = 0
summary_messages = []
all_files = [
filename for filename in all_files if not
any(fnmatch.fnmatch(filename, pattern) for pattern in EXCLUDED_PATHS)]
all_js_files = [
filename for filename in all_files if filename.endswith('.js')]
failed = False
for filename in all_files:
with open(filename) as f:
content = f.read()
total_files_checked += 1
for pattern in BAD_PATTERNS:
if pattern in content and filename not in (
BAD_PATTERNS[pattern]['excluded_files']):
failed = True
print '%s --> %s' % (
filename, BAD_PATTERNS[pattern]['message'])
total_error_count += 1
if filename in all_js_files:
for pattern in BAD_PATTERNS_JS:
if filename not in (
BAD_PATTERNS_JS[pattern]['excluded_files']):
if pattern in content:
failed = True
print '%s --> %s' % (
filename,
BAD_PATTERNS_JS[pattern]['message'])
total_error_count += 1
if failed:
summary_message = '%s Pattern checks failed' % _MESSAGE_TYPE_FAILED
summary_messages.append(summary_message)
else:
summary_message = '%s Pattern checks passed' % _MESSAGE_TYPE_SUCCESS
summary_messages.append(summary_message)
print ''
print '----------------------------------------'
print ''
if total_files_checked == 0:
print "There are no files to be checked."
else:
print '(%s files checked, %s errors found)' % (
total_files_checked, total_error_count)
print summary_message
return summary_messages
def main():
all_files = _get_all_files()
linter_messages = _pre_commit_linter(all_files)
pattern_messages = _check_bad_patterns(all_files)
all_messages = linter_messages + pattern_messages
if any([message.startswith(_MESSAGE_TYPE_FAILED) for message in
all_messages]):
sys.exit(1)
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>matrix.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Adam Perry <[email protected]>
//
// This software may be modified and distributed under the terms of the MIT license. See the
// LICENSE file for details.
use std::ffi::CString;
use std::ops::Deref;
// use libc::{c_int, c_char};
use parasail_sys::{ParasailMatrix, parasail_matrix_create, parasail_matrix_free,
parasail_matrix_lookup};
/// A substitution matrix to use when aligning DNA or protein. Can be reused in many profiles.
pub struct Matrix {
matrix_type: MatrixType,
internal_rep: *const ParasailMatrix,
}
unsafe impl Send for Matrix {}
unsafe impl Sync for Matrix {}
impl Matrix {
/// Either create a dynamic substitution matrix (as in `MatrixType::Identity`) or look up a statically allocated matrix (as in any of the native parasail PAM and BLOSUM matrices).
///
/// # Examples
///
/// ```
/// # use parasailors::*;
/// // create & lookup substitution matrices
/// let identity_matrix = Matrix::new(MatrixType::Identity);
/// let blosum62 = Matrix::new(MatrixType::Blosum62);
/// let pam120 = Matrix::new(MatrixType::Pam120);
/// ```
pub fn new(matrix_type: MatrixType) -> Self {
unsafe {
// we can pass this pointer because it will outlive this unsafe block
// parasail won't keep a hold of it after the lookup
let matrix: *const ParasailMatrix = match matrix_type {
MatrixType::Identity => {
let alphabet = &CString::new("ARNDCQEGHILKMFPSTWYVBZX")
.expect("An internal error has occurred (creating \
identity matrix). Please file an issue at \
https://github.\
com/dikaiosune/parasailors/issues with a sample \
of the code that caused this error.");
parasail_matrix_create(alphabet.as_ptr(), 1, 0)
}
MatrixType::IdentityWithPenalty => {
let alphabet = &CString::new("ARNDCQEGHILKMFPSTWYVBZX")
.expect("An internal error has occurred (creating \
identity matrix). Please file an issue at \
https://github.\
com/dikaiosune/parasailors/issues with a sample \<|fim▁hole|> parasail_matrix_create(alphabet.as_ptr(), 1, -1)
}
_ => {
let lookup_name = match matrix_type {
MatrixType::Blosum100 => "blosum100",
MatrixType::Blosum30 => "blosum30",
MatrixType::Blosum35 => "blosum35",
MatrixType::Blosum40 => "blosum40",
MatrixType::Blosum45 => "blosum45",
MatrixType::Blosum50 => "blosum50",
MatrixType::Blosum55 => "blosum55",
MatrixType::Blosum60 => "blosum60",
MatrixType::Blosum62 => "blosum62",
MatrixType::Blosum65 => "blosum65",
MatrixType::Blosum70 => "blosum70",
MatrixType::Blosum75 => "blosum75",
MatrixType::Blosum80 => "blosum80",
MatrixType::Blosum85 => "blosum85",
MatrixType::Blosum90 => "blosum90",
MatrixType::Pam10 => "pam10",
MatrixType::Pam100 => "pam100",
MatrixType::Pam110 => "pam110",
MatrixType::Pam120 => "pam120",
MatrixType::Pam130 => "pam130",
MatrixType::Pam140 => "pam140",
MatrixType::Pam150 => "pam150",
MatrixType::Pam160 => "pam160",
MatrixType::Pam170 => "pam170",
MatrixType::Pam180 => "pam180",
MatrixType::Pam190 => "pam190",
MatrixType::Pam20 => "pam20",
MatrixType::Pam200 => "pam200",
MatrixType::Pam210 => "pam210",
MatrixType::Pam220 => "pam220",
MatrixType::Pam230 => "pam230",
MatrixType::Pam240 => "pam240",
MatrixType::Pam250 => "pam250",
MatrixType::Pam260 => "pam260",
MatrixType::Pam270 => "pam270",
MatrixType::Pam280 => "pam280",
MatrixType::Pam290 => "pam290",
MatrixType::Pam30 => "pam30",
MatrixType::Pam300 => "pam300",
MatrixType::Pam310 => "pam310",
MatrixType::Pam320 => "pam320",
MatrixType::Pam330 => "pam330",
MatrixType::Pam340 => "pam340",
MatrixType::Pam350 => "pam350",
MatrixType::Pam360 => "pam360",
MatrixType::Pam370 => "pam370",
MatrixType::Pam380 => "pam380",
MatrixType::Pam390 => "pam390",
MatrixType::Pam40 => "pam40",
MatrixType::Pam400 => "pam400",
MatrixType::Pam410 => "pam410",
MatrixType::Pam420 => "pam420",
MatrixType::Pam430 => "pam430",
MatrixType::Pam440 => "pam440",
MatrixType::Pam450 => "pam450",
MatrixType::Pam460 => "pam460",
MatrixType::Pam470 => "pam470",
MatrixType::Pam480 => "pam480",
MatrixType::Pam490 => "pam490",
MatrixType::Pam50 => "pam50",
MatrixType::Pam500 => "pam500",
MatrixType::Pam60 => "pam60",
MatrixType::Pam70 => "pam70",
MatrixType::Pam80 => "pam80",
MatrixType::Pam90 => "pam90",
_ => "",
};
let lookup = &CString::new(lookup_name)
.expect("An internal error has occurred (matrix lookup \
with hardcoded string name). Please file an issue \
at https://github.\
com/dikaiosune/parasailors/issues with a sample \
of the code that caused this error.");
// we need a cast here because we have to store both mut and const
parasail_matrix_lookup(lookup.as_ptr())
}
};
// it's OK to keep this pointer forever, it points to static const structs
Matrix {
internal_rep: matrix,
matrix_type: matrix_type,
}
}
}
}
#[doc(hidden)]
impl Deref for Matrix {
type Target = *const ParasailMatrix;
fn deref(&self) -> &(*const ParasailMatrix) {
&self.internal_rep
}
}
#[doc(hidden)]
impl Drop for Matrix {
fn drop(&mut self) {
if let MatrixType::Identity = self.matrix_type {
unsafe { parasail_matrix_free(self.internal_rep as *mut ParasailMatrix) }
}
if let MatrixType::IdentityWithPenalty = self.matrix_type {
unsafe { parasail_matrix_free(self.internal_rep as *mut ParasailMatrix) }
}
}
}
/// Denotes the type of the substitution matrix. Use Identity for simple edit-distance calculations.
pub enum MatrixType {
/// The identity matrix awards 1 score for each direct match, and 0 score for each mismatch.
Identity,
/// An identity matrix which awards 1 score for each match and penalizes -1 for each mismatch.
IdentityWithPenalty,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 100 substitution matrix.
Blosum100,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 30 substitution matrix.
Blosum30,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 35 substitution matrix.
Blosum35,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 40 substitution matrix.
Blosum40,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 45 substitution matrix.
Blosum45,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 50 substitution matrix.
Blosum50,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 55 substitution matrix.
Blosum55,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 60 substitution matrix.
Blosum60,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 62 substitution matrix.
Blosum62,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 65 substitution matrix.
Blosum65,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 70 substitution matrix.
Blosum70,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 75 substitution matrix.
Blosum75,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 80 substitution matrix.
Blosum80,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 85 substitution matrix.
Blosum85,
/// The [BLOSUM](https://en.wikipedia.org/wiki/BLOSUM) 90 substitution matrix.
Blosum90,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 10 substitution matrix.
Pam10,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 100 substitution matrix.
Pam100,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 110 substitution matrix.
Pam110,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 120 substitution matrix.
Pam120,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 130 substitution matrix.
Pam130,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 140 substitution matrix.
Pam140,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 150 substitution matrix.
Pam150,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 160 substitution matrix.
Pam160,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 170 substitution matrix.
Pam170,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 180 substitution matrix.
Pam180,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 190 substitution matrix.
Pam190,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 20 substitution matrix.
Pam20,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 200 substitution matrix.
Pam200,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 210 substitution matrix.
Pam210,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 220 substitution matrix.
Pam220,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 230 substitution matrix.
Pam230,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 240 substitution matrix.
Pam240,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 250 substitution matrix.
Pam250,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 260 substitution matrix.
Pam260,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 270 substitution matrix.
Pam270,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 280 substitution matrix.
Pam280,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 290 substitution matrix.
Pam290,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 30 substitution matrix.
Pam30,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 300 substitution matrix.
Pam300,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 310 substitution matrix.
Pam310,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 320 substitution matrix.
Pam320,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 330 substitution matrix.
Pam330,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 340 substitution matrix.
Pam340,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 350 substitution matrix.
Pam350,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 360 substitution matrix.
Pam360,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 370 substitution matrix.
Pam370,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 380 substitution matrix.
Pam380,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 390 substitution matrix.
Pam390,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 40 substitution matrix.
Pam40,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 400 substitution matrix.
Pam400,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 410 substitution matrix.
Pam410,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 420 substitution matrix.
Pam420,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 430 substitution matrix.
Pam430,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 440 substitution matrix.
Pam440,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 450 substitution matrix.
Pam450,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 460 substitution matrix.
Pam460,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 470 substitution matrix.
Pam470,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 480 substitution matrix.
Pam480,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 490 substitution matrix.
Pam490,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 50 substitution matrix.
Pam50,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 500 substitution matrix.
Pam500,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 60 substitution matrix.
Pam60,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 70 substitution matrix.
Pam70,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 80 substitution matrix.
Pam80,
/// The [PAM](https://en.wikipedia.org/wiki/Point_accepted_mutation) 90 substitution matrix.
Pam90,
}<|fim▁end|> | of the code that caused this error."); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># encoding: utf-8
__author__ = "Nils Tobias Schmidt"
__email__ = "schmidt89 at informatik.uni-marburg.de"
from androlyze.error.WrapperException import WrapperException
############################################################
#---Helper functions
############################################################
def _create_delete_error_msg(content, destination):
return "Could not delete %s from %s" % (content, destination)
def _create_store_error_msg(content, destination):
return "Could not store result for %s to %s" % (content, destination)
def _create_load_error_msg(content, source):
return "Could not load %s from %s" % (content, source)
############################################################
#---General storage exceptions
############################################################
class StorageException(WrapperException):
''' Base exception for data storage '''
pass
############################################################
#---Database storage exceptions
############################################################
DB_STORE = "database"
class DatabaseException(StorageException):
pass
class EDatabaseException(DatabaseException):
''' Extended DatabaseException that has the database as parameter as well as content '''
def __init__(self, db, content, caused_by = None, **kwargs):
'''
Parameters
----------
db : object
content : object
The object that couldn't be loaded/stored.
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
DatabaseException.__init__(self, caused_by = caused_by, **kwargs)
self.db = db
self.content = content
class DatabaseDeleteException(EDatabaseException):
def _msg(self):
return _create_delete_error_msg(self.content, self.db)
class DatabaseStoreException(EDatabaseException):
def _msg(self):
return _create_store_error_msg(self.content, self.db)
class DatabaseLoadException(EDatabaseException):
def _msg(self):
return _create_load_error_msg(self.content, self.db)
class DatabaseOpenError(DatabaseException):
def __init__(self, db_name, **kwargs):
super(DatabaseOpenError, self).__init__(**kwargs)
self.db_name = db_name
def _msg(self):
return 'Could not open database: "%s"' % self.db_name
############################################################
#---S3 storage exceptions
############################################################
DB_STORE = "database"
class S3StorageException(StorageException):
pass
class ES3StorageException(S3StorageException):
''' Extended DatabaseException that has the database as parameter as well as content '''
def __init__(self, db, content, caused_by = None, **kwargs):
'''
Parameters
----------
db : object
content : object
The object that couldn't be loaded/stored.
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
S3StorageException.__init__(self, caused_by = caused_by, **kwargs)
self.db = db
self.content = content
class S3StorageDeleteException(ES3StorageException):
def _msg(self):
return _create_delete_error_msg(self.content, self.db)
class S3StorageStoreException(ES3StorageException):
def _msg(self):
return _create_store_error_msg(self.content, self.db)
class S3StorageLoadException(ES3StorageException):
def _msg(self):
return _create_load_error_msg(self.content, self.db)
class S3StorageOpenError(ES3StorageException):
def __init__(self, db_name, **kwargs):
super(ES3StorageException, self).__init__(**kwargs)
self.db_name = db_name
def _msg(self):
return 'Could not open bucket: "%s"' % self.db_name
############################################################
#---File system storage exceptions
############################################################
class FileSysException(StorageException):
def __init__(self, file_path, fs_storage, *args, **kwargs):
'''
Parameters
----------
file_path: str
the path of the file
fs_store : FileSysStorage
'''
super(FileSysException, self).__init__(*args, **kwargs)
self.file_path = file_path
self.fs_storage = fs_storage
<|fim▁hole|> Parameters
----------
file_path: str
the path of the file
content: object
the content which should be stored
fs_store : FileSysStorage
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
super(FileSysStoreException, self).__init__(file_path, fs_storage, caused_by = caused_by)
self.content = content
def _msg(self):
return _create_store_error_msg(self.content, self.file_path)
class FileSysCreateStorageStructureException(FileSysException):
def __init__(self, file_path, fs_storage, caused_by = None):
'''
Parameters
----------
file_path: str
the path of the file
fs_store : FileSysStorage
caused_by: Exception, optional (default is None)
the exception that caused this one to raise
'''
super(FileSysCreateStorageStructureException, self).__init__(file_path, fs_storage, caused_by = caused_by)
def _msg(self):
return "Could not create the file system structure: %s" % self.file_path
class FileSysLoadException(FileSysException):
def _msg(self):
return _create_load_error_msg(self.file_path, self.fs_storage)
class FileSysDeleteException(FileSysException):
def _msg(self):
return _create_delete_error_msg(self.file_path, self.fs_storage)<|fim▁end|> | class FileSysStoreException(FileSysException):
def __init__(self, file_path, content, fs_storage, caused_by = None):
''' |
<|file_name|>snow_record.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Tim Rightnour <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: snow_record
short_description: Manage records in ServiceNow
version_added: "2.5"
description:
- Creates, deletes and updates a single record in ServiceNow.
options:
table:
description:
- Table to query for records.
required: false
default: incident
type: str
state:
description:
- If C(present) is supplied with a C(number) argument, the module will attempt to update the record with the supplied data.
- If no such record exists, a new one will be created.
- C(absent) will delete a record.
choices: [ present, absent ]
required: true
type: str
data:
description:
- key, value pairs of data to load into the record. See Examples.<|fim▁hole|> number:
description:
- Record number to update.
- Required for C(state:absent).
required: false
type: str
lookup_field:
description:
- Changes the field that C(number) uses to find records.
required: false
default: number
type: str
attachment:
description:
- Attach a file to the record.
required: false
type: str
requirements:
- python pysnow (pysnow)
author:
- Tim Rightnour (@garbled1)
extends_documentation_fragment: service_now.documentation
'''
EXAMPLES = '''
- name: Grab a user record
snow_record:
username: ansible_test
password: my_password
instance: dev99999
state: present
number: 62826bf03710200044e0bfc8bcbe5df1
table: sys_user
lookup_field: sys_id
- name: Grab a user record using OAuth
snow_record:
username: ansible_test
password: my_password
client_id: "1234567890abcdef1234567890abcdef"
client_secret: "Password1!"
instance: dev99999
state: present
number: 62826bf03710200044e0bfc8bcbe5df1
table: sys_user
lookup_field: sys_id
- name: Create an incident
snow_record:
username: ansible_test
password: my_password
instance: dev99999
state: present
data:
short_description: "This is a test incident opened by Ansible"
severity: 3
priority: 2
register: new_incident
- name: Delete the record we just made
snow_record:
username: admin
password: xxxxxxx
instance: dev99999
state: absent
number: "{{new_incident['record']['number']}}"
- name: Delete a non-existant record
snow_record:
username: ansible_test
password: my_password
instance: dev99999
state: absent
number: 9872354
failed_when: false
- name: Update an incident
snow_record:
username: ansible_test
password: my_password
instance: dev99999
state: present
number: INC0000055
data:
work_notes : "Been working all day on this thing."
- name: Attach a file to an incident
snow_record:
username: ansible_test
password: my_password
instance: dev99999
state: present
number: INC0000055
attachment: README.md
tags: attach
'''
RETURN = '''
record:
description: Record data from Service Now
type: dict
returned: when supported
attached_file:
description: Details of the file that was attached via C(attachment)
type: dict
returned: when supported
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes, to_native
from ansible.module_utils.service_now import ServiceNowClient
try:
# This is being handled by ServiceNowClient
import pysnow
except ImportError:
pass
def run_module():
# define the available arguments/parameters that a user can pass to
# the module
module_args = ServiceNowClient.snow_argument_spec()
module_args.update(
table=dict(type='str', required=False, default='incident'),
state=dict(choices=['present', 'absent'],
type='str', required=True),
number=dict(default=None, required=False, type='str'),
data=dict(default=None, required=False, type='dict'),
lookup_field=dict(default='number', required=False, type='str'),
attachment=dict(default=None, required=False, type='str')
)
module_required_together = [
['client_id', 'client_secret']
]
module_required_if = [
['state', 'absent', ['number']],
]
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=True,
required_together=module_required_together,
required_if=module_required_if
)
# Connect to ServiceNow
service_now_client = ServiceNowClient(module)
service_now_client.login()
conn = service_now_client.conn
params = module.params
instance = params['instance']
table = params['table']
state = params['state']
number = params['number']
data = params['data']
lookup_field = params['lookup_field']
result = dict(
changed=False,
instance=instance,
table=table,
number=number,
lookup_field=lookup_field
)
# check for attachments
if params['attachment'] is not None:
attach = params['attachment']
b_attach = to_bytes(attach, errors='surrogate_or_strict')
if not os.path.exists(b_attach):
module.fail_json(msg="Attachment {0} not found".format(attach))
result['attachment'] = attach
else:
attach = None
# Deal with check mode
if module.check_mode:
# if we are in check mode and have no number, we would have created
# a record. We can only partially simulate this
if number is None:
result['record'] = dict(data)
result['changed'] = True
# do we want to check if the record is non-existent?
elif state == 'absent':
try:
record = conn.query(table=table, query={lookup_field: number})
res = record.get_one()
result['record'] = dict(Success=True)
result['changed'] = True
except pysnow.exceptions.NoResults:
result['record'] = None
except Exception as detail:
module.fail_json(msg="Unknown failure in query record: {0}".format(to_native(detail)), **result)
# Let's simulate modification
else:
try:
record = conn.query(table=table, query={lookup_field: number})
res = record.get_one()
for key, value in data.items():
res[key] = value
result['changed'] = True
result['record'] = res
except pysnow.exceptions.NoResults:
snow_error = "Record does not exist"
module.fail_json(msg=snow_error, **result)
except Exception as detail:
module.fail_json(msg="Unknown failure in query record: {0}".format(to_native(detail)), **result)
module.exit_json(**result)
# now for the real thing: (non-check mode)
# are we creating a new record?
if state == 'present' and number is None:
try:
record = conn.insert(table=table, payload=dict(data))
except pysnow.exceptions.UnexpectedResponseFormat as e:
snow_error = "Failed to create record: {0}, details: {1}".format(e.error_summary, e.error_details)
module.fail_json(msg=snow_error, **result)
except pysnow.legacy_exceptions.UnexpectedResponse as e:
module.fail_json(msg="Failed to create record due to %s" % to_native(e), **result)
result['record'] = record
result['changed'] = True
# we are deleting a record
elif state == 'absent':
try:
record = conn.query(table=table, query={lookup_field: number})
res = record.delete()
except pysnow.exceptions.NoResults:
res = dict(Success=True)
except pysnow.exceptions.MultipleResults:
snow_error = "Multiple record match"
module.fail_json(msg=snow_error, **result)
except pysnow.exceptions.UnexpectedResponseFormat as e:
snow_error = "Failed to delete record: {0}, details: {1}".format(e.error_summary, e.error_details)
module.fail_json(msg=snow_error, **result)
except pysnow.legacy_exceptions.UnexpectedResponse as e:
module.fail_json(msg="Failed to delete record due to %s" % to_native(e), **result)
except Exception as detail:
snow_error = "Failed to delete record: {0}".format(to_native(detail))
module.fail_json(msg=snow_error, **result)
result['record'] = res
result['changed'] = True
# We want to update a record
else:
try:
record = conn.query(table=table, query={lookup_field: number})
if data is not None:
res = record.update(dict(data))
result['record'] = res
result['changed'] = True
else:
res = record.get_one()
result['record'] = res
if attach is not None:
res = record.attach(b_attach)
result['changed'] = True
result['attached_file'] = res
except pysnow.exceptions.MultipleResults:
snow_error = "Multiple record match"
module.fail_json(msg=snow_error, **result)
except pysnow.exceptions.NoResults:
snow_error = "Record does not exist"
module.fail_json(msg=snow_error, **result)
except pysnow.exceptions.UnexpectedResponseFormat as e:
snow_error = "Failed to update record: {0}, details: {1}".format(e.error_summary, e.error_details)
module.fail_json(msg=snow_error, **result)
except pysnow.legacy_exceptions.UnexpectedResponse as e:
module.fail_json(msg="Failed to update record due to %s" % to_native(e), **result)
except Exception as detail:
snow_error = "Failed to update record: {0}".format(to_native(detail))
module.fail_json(msg=snow_error, **result)
module.exit_json(**result)
def main():
run_module()
if __name__ == '__main__':
main()<|fim▁end|> | - Required for C(state:present).
type: dict |
<|file_name|>drawable.py<|end_file_name|><|fim▁begin|>from static import tools
class DrawAble(object):<|fim▁hole|> self._zIndex=zIndex
self.__activated=None
self.activated=activated
def __del__(self):
self.activated=False
#zindex
def __getZIndex(self):
return self._zIndex
zIndex=property(__getZIndex)
#enabled
def _disable(self):
tools.spritebatch.remove(self)
def _enable(self):
tools.spritebatch.add(self)
def __setActivated(self,b):
if self.__activated!=b:
self.__activated=b
if b:
self._enable()
else:
self._disable()
def __getActivated(self):
return self.__activated
activated=property(__getActivated,__setActivated)<|fim▁end|> | def __init__(self,image,position,zIndex=0,activated=True):
self.image=image
self.position=position |
<|file_name|>WII_Socket.cpp<|end_file_name|><|fim▁begin|>// Copyright 2013 Dolphin Emulator Project
// Licensed under GPLv2+
// Refer to the license.txt file included.
#include <algorithm>
#include <mbedtls/error.h>
#ifndef _WIN32
#include <arpa/inet.h>
#include <unistd.h>
#endif
#include "Common/FileUtil.h"
#include "Core/ConfigManager.h"
#include "Core/Core.h"
#include "Core/IPC_HLE/WII_IPC_HLE.h"
#include "Core/IPC_HLE/WII_IPC_HLE_Device.h"
#include "Core/IPC_HLE/WII_Socket.h" // No Wii socket support while using NetPlay or TAS
#ifdef _WIN32
#define ERRORCODE(name) WSA##name
#define EITHER(win32, posix) win32
#else
#define ERRORCODE(name) name
#define EITHER(win32, posix) posix
#endif
char* WiiSockMan::DecodeError(s32 ErrorCode)
{
#ifdef _WIN32
// NOT THREAD SAFE
static char Message[1024];
FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM | FORMAT_MESSAGE_IGNORE_INSERTS |
FORMAT_MESSAGE_MAX_WIDTH_MASK,
nullptr, ErrorCode, MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT), Message,
sizeof(Message), nullptr);
return Message;
#else
return strerror(ErrorCode);
#endif
}
static s32 TranslateErrorCode(s32 native_error, bool isRW)
{
switch (native_error)
{
case ERRORCODE(EMSGSIZE):
ERROR_LOG(WII_IPC_NET, "Find out why this happened, looks like PEEK failure?");
return -1; // Should be -SO_EMSGSIZE
case EITHER(WSAENOTSOCK, EBADF):
return -SO_EBADF;
case ERRORCODE(EADDRINUSE):
return -SO_EADDRINUSE;
case ERRORCODE(ECONNRESET):
return -SO_ECONNRESET;
case ERRORCODE(EISCONN):
return -SO_EISCONN;
case ERRORCODE(ENOTCONN):
return -SO_EAGAIN; // After proper blocking SO_EAGAIN shouldn't be needed...
case ERRORCODE(EINPROGRESS):
return -SO_EINPROGRESS;
case ERRORCODE(EALREADY):
return -SO_EALREADY;
case ERRORCODE(EACCES):
return -SO_EACCES;
case ERRORCODE(ECONNREFUSED):
return -SO_ECONNREFUSED;
case ERRORCODE(ENETUNREACH):
return -SO_ENETUNREACH;
case ERRORCODE(EHOSTUNREACH):
return -SO_EHOSTUNREACH;
case EITHER(WSAEWOULDBLOCK, EAGAIN):
if (isRW)
{
return -SO_EAGAIN; // EAGAIN
}
else
{
return -SO_EINPROGRESS; // EINPROGRESS
}
default:
return -1;
}
}
// Don't use string! (see https://github.com/dolphin-emu/dolphin/pull/3143)
s32 WiiSockMan::GetNetErrorCode(s32 ret, const char* caller, bool isRW)
{
#ifdef _WIN32
s32 errorCode = WSAGetLastError();
#else
s32 errorCode = errno;
#endif
if (ret >= 0)
{
WiiSockMan::GetInstance().SetLastNetError(ret);
return ret;
}
ERROR_LOG(WII_IPC_NET, "%s failed with error %d: %s, ret= %d", caller, errorCode,
DecodeError(errorCode), ret);
s32 ReturnValue = TranslateErrorCode(errorCode, isRW);
WiiSockMan::GetInstance().SetLastNetError(ReturnValue);
return ReturnValue;
}
WiiSocket::~WiiSocket()
{
if (fd >= 0)
{
(void)CloseFd();
}
}
void WiiSocket::SetFd(s32 s)
{
if (fd >= 0)
(void)CloseFd();
nonBlock = false;
fd = s;
// Set socket to NON-BLOCK
#ifdef _WIN32
u_long iMode = 1;
ioctlsocket(fd, FIONBIO, &iMode);
#else
int flags;
if (-1 == (flags = fcntl(fd, F_GETFL, 0)))
flags = 0;
fcntl(fd, F_SETFL, flags | O_NONBLOCK);
#endif
}
s32 WiiSocket::CloseFd()
{
s32 ReturnValue = 0;
if (fd >= 0)
{
#ifdef _WIN32
s32 ret = closesocket(fd);
#else
s32 ret = close(fd);
#endif
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "CloseFd", false);
}
else
{
ReturnValue = WiiSockMan::GetNetErrorCode(EITHER(WSAENOTSOCK, EBADF), "CloseFd", false);
}
fd = -1;
return ReturnValue;
}
s32 WiiSocket::FCntl(u32 cmd, u32 arg)
{
#define F_GETFL 3
#define F_SETFL 4
#define F_NONBLOCK 4
s32 ret = 0;
if (cmd == F_GETFL)
{
ret = nonBlock ? F_NONBLOCK : 0;
}
else if (cmd == F_SETFL)
{
nonBlock = (arg & F_NONBLOCK) == F_NONBLOCK;
}
else
{
ERROR_LOG(WII_IPC_NET, "SO_FCNTL unknown command");
}
INFO_LOG(WII_IPC_NET, "IOCTL_SO_FCNTL(%08x, %08X, %08X)", fd, cmd, arg);
return ret;
}
void WiiSocket::Update(bool read, bool write, bool except)
{
auto it = pending_sockops.begin();
while (it != pending_sockops.end())
{
s32 ReturnValue = 0;
bool forceNonBlock = false;
IPCCommandType ct = static_cast<IPCCommandType>(Memory::Read_U32(it->_CommandAddress));
if (!it->is_ssl && ct == IPC_CMD_IOCTL)
{
u32 BufferIn = Memory::Read_U32(it->_CommandAddress + 0x10);
u32 BufferInSize = Memory::Read_U32(it->_CommandAddress + 0x14);
u32 BufferOut = Memory::Read_U32(it->_CommandAddress + 0x18);
u32 BufferOutSize = Memory::Read_U32(it->_CommandAddress + 0x1C);
switch (it->net_type)
{
case IOCTL_SO_FCNTL:
{
u32 cmd = Memory::Read_U32(BufferIn + 4);
u32 arg = Memory::Read_U32(BufferIn + 8);
ReturnValue = FCntl(cmd, arg);
break;
}
case IOCTL_SO_BIND:
{
// u32 has_addr = Memory::Read_U32(BufferIn + 0x04);
sockaddr_in local_name;
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferIn + 0x08);
WiiSockMan::Convert(*wii_name, local_name);
int ret = bind(fd, (sockaddr*)&local_name, sizeof(local_name));
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "SO_BIND", false);
INFO_LOG(WII_IPC_NET, "IOCTL_SO_BIND (%08X %s:%d) = %d ", fd,
inet_ntoa(local_name.sin_addr), Common::swap16(local_name.sin_port), ret);
break;
}
case IOCTL_SO_CONNECT:
{
// u32 has_addr = Memory::Read_U32(BufferIn + 0x04);
sockaddr_in local_name;
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferIn + 0x08);
WiiSockMan::Convert(*wii_name, local_name);
int ret = connect(fd, (sockaddr*)&local_name, sizeof(local_name));
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "SO_CONNECT", false);
INFO_LOG(WII_IPC_NET, "IOCTL_SO_CONNECT (%08x, %s:%d)", fd, inet_ntoa(local_name.sin_addr),
Common::swap16(local_name.sin_port));
break;
}
case IOCTL_SO_ACCEPT:
{
if (BufferOutSize > 0)
{
sockaddr_in local_name;
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferOut);
WiiSockMan::Convert(*wii_name, local_name);
socklen_t addrlen = sizeof(sockaddr_in);
int ret = (s32)accept(fd, (sockaddr*)&local_name, &addrlen);
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "SO_ACCEPT", true);
WiiSockMan::Convert(local_name, *wii_name, addrlen);
}
else
{
int ret = (s32)accept(fd, nullptr, nullptr);
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "SO_ACCEPT", true);
}
WiiSockMan::GetInstance().AddSocket(ReturnValue);
INFO_LOG(WII_IPC_NET, "IOCTL_SO_ACCEPT "
"BufferIn: (%08x, %i), BufferOut: (%08x, %i)",
BufferIn, BufferInSize, BufferOut, BufferOutSize);
break;
}
default:
break;
}
// Fix blocking error codes
if (!nonBlock)
{
if (it->net_type == IOCTL_SO_CONNECT && ReturnValue == -SO_EISCONN)
{
ReturnValue = SO_SUCCESS;
}
}
}
else if (ct == IPC_CMD_IOCTLV)
{
SIOCtlVBuffer CommandBuffer(it->_CommandAddress);
u32 BufferIn = 0, BufferIn2 = 0;
u32 BufferInSize = 0, BufferInSize2 = 0;
u32 BufferOut = 0, BufferOut2 = 0;
u32 BufferOutSize = 0, BufferOutSize2 = 0;
if (CommandBuffer.InBuffer.size() > 0)
{
BufferIn = CommandBuffer.InBuffer.at(0).m_Address;<|fim▁hole|> BufferInSize = CommandBuffer.InBuffer.at(0).m_Size;
}
if (CommandBuffer.PayloadBuffer.size() > 0)
{
BufferOut = CommandBuffer.PayloadBuffer.at(0).m_Address;
BufferOutSize = CommandBuffer.PayloadBuffer.at(0).m_Size;
}
if (CommandBuffer.PayloadBuffer.size() > 1)
{
BufferOut2 = CommandBuffer.PayloadBuffer.at(1).m_Address;
BufferOutSize2 = CommandBuffer.PayloadBuffer.at(1).m_Size;
}
if (CommandBuffer.InBuffer.size() > 1)
{
BufferIn2 = CommandBuffer.InBuffer.at(1).m_Address;
BufferInSize2 = CommandBuffer.InBuffer.at(1).m_Size;
}
if (it->is_ssl)
{
int sslID = Memory::Read_U32(BufferOut) - 1;
if (SSLID_VALID(sslID))
{
switch (it->ssl_type)
{
case IOCTLV_NET_SSL_DOHANDSHAKE:
{
mbedtls_ssl_context* ctx = &CWII_IPC_HLE_Device_net_ssl::_SSL[sslID].ctx;
int ret = mbedtls_ssl_handshake(ctx);
if (ret)
{
char error_buffer[256] = "";
mbedtls_strerror(ret, error_buffer, sizeof(error_buffer));
ERROR_LOG(WII_IPC_SSL, "IOCTLV_NET_SSL_DOHANDSHAKE: %s", error_buffer);
}
switch (ret)
{
case 0:
Memory::Write_U32(SSL_OK, BufferIn);
break;
case MBEDTLS_ERR_SSL_WANT_READ:
Memory::Write_U32(SSL_ERR_RAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_RAGAIN;
break;
case MBEDTLS_ERR_SSL_WANT_WRITE:
Memory::Write_U32(SSL_ERR_WAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_WAGAIN;
break;
case MBEDTLS_ERR_X509_CERT_VERIFY_FAILED:
{
char error_buffer[256] = "";
int res = mbedtls_ssl_get_verify_result(ctx);
mbedtls_x509_crt_verify_info(error_buffer, sizeof(error_buffer), "", res);
ERROR_LOG(WII_IPC_SSL, "MBEDTLS_ERR_X509_CERT_VERIFY_FAILED (verify_result = %d): %s",
res, error_buffer);
if (res & MBEDTLS_X509_BADCERT_CN_MISMATCH)
res = SSL_ERR_VCOMMONNAME;
else if (res & MBEDTLS_X509_BADCERT_NOT_TRUSTED)
res = SSL_ERR_VROOTCA;
else if (res & MBEDTLS_X509_BADCERT_REVOKED)
res = SSL_ERR_VCHAIN;
else if (res & MBEDTLS_X509_BADCERT_EXPIRED || res & MBEDTLS_X509_BADCERT_FUTURE)
res = SSL_ERR_VDATE;
else
res = SSL_ERR_FAILED;
Memory::Write_U32(res, BufferIn);
if (!nonBlock)
ReturnValue = res;
break;
}
default:
Memory::Write_U32(SSL_ERR_FAILED, BufferIn);
break;
}
// mbedtls_ssl_get_peer_cert(ctx) seems not to work if handshake failed
// Below is an alternative to dump the peer certificate
if (SConfig::GetInstance().m_SSLDumpPeerCert && ctx->session_negotiate != nullptr)
{
const mbedtls_x509_crt* cert = ctx->session_negotiate->peer_cert;
if (cert != nullptr)
{
std::string filename = File::GetUserPath(D_DUMPSSL_IDX) +
((ctx->hostname != nullptr) ? ctx->hostname : "") +
"_peercert.der";
File::IOFile(filename, "wb").WriteBytes(cert->raw.p, cert->raw.len);
}
}
INFO_LOG(WII_IPC_SSL, "IOCTLV_NET_SSL_DOHANDSHAKE = (%d) "
"BufferIn: (%08x, %i), BufferIn2: (%08x, %i), "
"BufferOut: (%08x, %i), BufferOut2: (%08x, %i)",
ret, BufferIn, BufferInSize, BufferIn2, BufferInSize2, BufferOut,
BufferOutSize, BufferOut2, BufferOutSize2);
break;
}
case IOCTLV_NET_SSL_WRITE:
{
int ret = mbedtls_ssl_write(&CWII_IPC_HLE_Device_net_ssl::_SSL[sslID].ctx,
Memory::GetPointer(BufferOut2), BufferOutSize2);
if (SConfig::GetInstance().m_SSLDumpWrite && ret > 0)
{
std::string filename = File::GetUserPath(D_DUMPSSL_IDX) +
SConfig::GetInstance().GetUniqueID() + "_write.bin";
File::IOFile(filename, "ab").WriteBytes(Memory::GetPointer(BufferOut2), ret);
}
if (ret >= 0)
{
// Return bytes written or SSL_ERR_ZERO if none
Memory::Write_U32((ret == 0) ? SSL_ERR_ZERO : ret, BufferIn);
}
else
{
switch (ret)
{
case MBEDTLS_ERR_SSL_WANT_READ:
Memory::Write_U32(SSL_ERR_RAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_RAGAIN;
break;
case MBEDTLS_ERR_SSL_WANT_WRITE:
Memory::Write_U32(SSL_ERR_WAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_WAGAIN;
break;
default:
Memory::Write_U32(SSL_ERR_FAILED, BufferIn);
break;
}
}
break;
}
case IOCTLV_NET_SSL_READ:
{
int ret = mbedtls_ssl_read(&CWII_IPC_HLE_Device_net_ssl::_SSL[sslID].ctx,
Memory::GetPointer(BufferIn2), BufferInSize2);
if (SConfig::GetInstance().m_SSLDumpRead && ret > 0)
{
std::string filename = File::GetUserPath(D_DUMPSSL_IDX) +
SConfig::GetInstance().GetUniqueID() + "_read.bin";
File::IOFile(filename, "ab").WriteBytes(Memory::GetPointer(BufferIn2), ret);
}
if (ret >= 0)
{
// Return bytes read or SSL_ERR_ZERO if none
Memory::Write_U32((ret == 0) ? SSL_ERR_ZERO : ret, BufferIn);
}
else
{
switch (ret)
{
case MBEDTLS_ERR_SSL_WANT_READ:
Memory::Write_U32(SSL_ERR_RAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_RAGAIN;
break;
case MBEDTLS_ERR_SSL_WANT_WRITE:
Memory::Write_U32(SSL_ERR_WAGAIN, BufferIn);
if (!nonBlock)
ReturnValue = SSL_ERR_WAGAIN;
break;
default:
Memory::Write_U32(SSL_ERR_FAILED, BufferIn);
break;
}
}
break;
}
default:
break;
}
}
else
{
Memory::Write_U32(SSL_ERR_ID, BufferIn);
}
}
else
{
switch (it->net_type)
{
case IOCTLV_SO_SENDTO:
{
u32 flags = Memory::Read_U32(BufferIn2 + 0x04);
u32 has_destaddr = Memory::Read_U32(BufferIn2 + 0x08);
// Not a string, Windows requires a const char* for sendto
const char* data = (const char*)Memory::GetPointer(BufferIn);
// Act as non blocking when SO_MSG_NONBLOCK is specified
forceNonBlock = ((flags & SO_MSG_NONBLOCK) == SO_MSG_NONBLOCK);
// send/sendto only handles MSG_OOB
flags &= SO_MSG_OOB;
sockaddr_in local_name = {0};
if (has_destaddr)
{
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferIn2 + 0x0C);
WiiSockMan::Convert(*wii_name, local_name);
}
int ret = sendto(fd, data, BufferInSize, flags,
has_destaddr ? (struct sockaddr*)&local_name : nullptr,
has_destaddr ? sizeof(sockaddr) : 0);
ReturnValue = WiiSockMan::GetNetErrorCode(ret, "SO_SENDTO", true);
DEBUG_LOG(
WII_IPC_NET,
"%s = %d Socket: %08x, BufferIn: (%08x, %i), BufferIn2: (%08x, %i), %u.%u.%u.%u",
has_destaddr ? "IOCTLV_SO_SENDTO " : "IOCTLV_SO_SEND ", ReturnValue, fd, BufferIn,
BufferInSize, BufferIn2, BufferInSize2, local_name.sin_addr.s_addr & 0xFF,
(local_name.sin_addr.s_addr >> 8) & 0xFF, (local_name.sin_addr.s_addr >> 16) & 0xFF,
(local_name.sin_addr.s_addr >> 24) & 0xFF);
break;
}
case IOCTLV_SO_RECVFROM:
{
u32 flags = Memory::Read_U32(BufferIn + 0x04);
// Not a string, Windows requires a char* for recvfrom
char* data = (char*)Memory::GetPointer(BufferOut);
int data_len = BufferOutSize;
sockaddr_in local_name;
memset(&local_name, 0, sizeof(sockaddr_in));
if (BufferOutSize2 != 0)
{
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferOut2);
WiiSockMan::Convert(*wii_name, local_name);
}
// Act as non blocking when SO_MSG_NONBLOCK is specified
forceNonBlock = ((flags & SO_MSG_NONBLOCK) == SO_MSG_NONBLOCK);
// recv/recvfrom only handles PEEK/OOB
flags &= SO_MSG_PEEK | SO_MSG_OOB;
#ifdef _WIN32
if (flags & SO_MSG_PEEK)
{
unsigned long totallen = 0;
ioctlsocket(fd, FIONREAD, &totallen);
ReturnValue = totallen;
break;
}
#endif
socklen_t addrlen = sizeof(sockaddr_in);
int ret = recvfrom(fd, data, data_len, flags,
BufferOutSize2 ? (struct sockaddr*)&local_name : nullptr,
BufferOutSize2 ? &addrlen : nullptr);
ReturnValue =
WiiSockMan::GetNetErrorCode(ret, BufferOutSize2 ? "SO_RECVFROM" : "SO_RECV", true);
INFO_LOG(WII_IPC_NET, "%s(%d, %p) Socket: %08X, Flags: %08X, "
"BufferIn: (%08x, %i), BufferIn2: (%08x, %i), "
"BufferOut: (%08x, %i), BufferOut2: (%08x, %i)",
BufferOutSize2 ? "IOCTLV_SO_RECVFROM " : "IOCTLV_SO_RECV ", ReturnValue, data,
fd, flags, BufferIn, BufferInSize, BufferIn2, BufferInSize2, BufferOut,
BufferOutSize, BufferOut2, BufferOutSize2);
if (BufferOutSize2 != 0)
{
WiiSockAddrIn* wii_name = (WiiSockAddrIn*)Memory::GetPointer(BufferOut2);
WiiSockMan::Convert(local_name, *wii_name, addrlen);
}
break;
}
default:
break;
}
}
}
if (nonBlock || forceNonBlock ||
(!it->is_ssl && ReturnValue != -SO_EAGAIN && ReturnValue != -SO_EINPROGRESS &&
ReturnValue != -SO_EALREADY) ||
(it->is_ssl && ReturnValue != SSL_ERR_WAGAIN && ReturnValue != SSL_ERR_RAGAIN))
{
DEBUG_LOG(WII_IPC_NET,
"IOCTL(V) Sock: %08x ioctl/v: %d returned: %d nonBlock: %d forceNonBlock: %d", fd,
it->is_ssl ? (int)it->ssl_type : (int)it->net_type, ReturnValue, nonBlock,
forceNonBlock);
WiiSockMan::EnqueueReply(it->_CommandAddress, ReturnValue, ct);
it = pending_sockops.erase(it);
}
else
{
++it;
}
}
}
void WiiSocket::DoSock(u32 _CommandAddress, NET_IOCTL type)
{
sockop so = {_CommandAddress, false};
so.net_type = type;
pending_sockops.push_back(so);
}
void WiiSocket::DoSock(u32 _CommandAddress, SSL_IOCTL type)
{
sockop so = {_CommandAddress, true};
so.ssl_type = type;
pending_sockops.push_back(so);
}
void WiiSockMan::AddSocket(s32 fd)
{
if (fd >= 0)
{
WiiSocket& sock = WiiSockets[fd];
sock.SetFd(fd);
}
}
s32 WiiSockMan::NewSocket(s32 af, s32 type, s32 protocol)
{
s32 fd = (s32)socket(af, type, protocol);
s32 ret = GetNetErrorCode(fd, "NewSocket", false);
AddSocket(ret);
return ret;
}
s32 WiiSockMan::DeleteSocket(s32 s)
{
auto socket_entry = WiiSockets.find(s);
s32 ReturnValue = socket_entry->second.CloseFd();
WiiSockets.erase(socket_entry);
return ReturnValue;
}
void WiiSockMan::Update()
{
s32 nfds = 0;
fd_set read_fds, write_fds, except_fds;
struct timeval t = {0, 0};
FD_ZERO(&read_fds);
FD_ZERO(&write_fds);
FD_ZERO(&except_fds);
auto socket_iter = WiiSockets.begin();
auto end_socks = WiiSockets.end();
while (socket_iter != end_socks)
{
WiiSocket& sock = socket_iter->second;
if (sock.IsValid())
{
FD_SET(sock.fd, &read_fds);
FD_SET(sock.fd, &write_fds);
FD_SET(sock.fd, &except_fds);
nfds = std::max(nfds, sock.fd + 1);
++socket_iter;
}
else
{
// Good time to clean up invalid sockets.
socket_iter = WiiSockets.erase(socket_iter);
}
}
s32 ret = select(nfds, &read_fds, &write_fds, &except_fds, &t);
if (ret >= 0)
{
for (auto& pair : WiiSockets)
{
WiiSocket& sock = pair.second;
sock.Update(FD_ISSET(sock.fd, &read_fds) != 0, FD_ISSET(sock.fd, &write_fds) != 0,
FD_ISSET(sock.fd, &except_fds) != 0);
}
}
else
{
for (auto& elem : WiiSockets)
{
elem.second.Update(false, false, false);
}
}
}
void WiiSockMan::EnqueueReply(u32 CommandAddress, s32 ReturnValue, IPCCommandType CommandType)
{
// The original hardware overwrites the command type with the async reply type.
Memory::Write_U32(IPC_REP_ASYNC, CommandAddress);
// IOS also seems to write back the command that was responded to in the FD field.
Memory::Write_U32(CommandType, CommandAddress + 8);
// Return value
Memory::Write_U32(ReturnValue, CommandAddress + 4);
WII_IPC_HLE_Interface::EnqueueReply(CommandAddress);
}
void WiiSockMan::Convert(WiiSockAddrIn const& from, sockaddr_in& to)
{
to.sin_addr.s_addr = from.addr.addr;
to.sin_family = from.family;
to.sin_port = from.port;
}
void WiiSockMan::Convert(sockaddr_in const& from, WiiSockAddrIn& to, s32 addrlen)
{
to.addr.addr = from.sin_addr.s_addr;
to.family = from.sin_family & 0xFF;
to.port = from.sin_port;
if (addrlen < 0 || addrlen > (s32)sizeof(WiiSockAddrIn))
to.len = sizeof(WiiSockAddrIn);
else
to.len = addrlen;
}
void WiiSockMan::UpdateWantDeterminism(bool want)
{
// If we switched into movie recording, kill existing sockets.
if (want)
Clean();
}
#undef ERRORCODE
#undef EITHER<|fim▁end|> | |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from collections import namedtuple
import configparser
from functools import lru_cache
import unittest
from unittest.mock import patch, mock_open
import transaction
import testing.postgresql
import webtest
import datetime
from pyramid.config import Configurator
from pyramid.paster import get_app
from sqlalchemy import create_engine
from sqlalchemy.exc import SAWarning
import test_project
import inspect
import os
import urllib
import warnings
import json
from parameterized import parameterized
import pyramid_jsonapi.metadata
from openapi_spec_validator import validate_spec
import pprint
import ltree
from pyramid_jsonapi.permissions import (
Permission,
Targets,
)
from test_project.models import (
DBSession,
Base,
Person,
Blog,
)
from test_project import test_data
cur_dir = os.path.dirname(
os.path.abspath(
inspect.getfile(inspect.currentframe())
)
)
parent_dir = os.path.dirname(cur_dir)
RelHalf = namedtuple('RelSide', 'collection rel many filters')
FilterInfo = namedtuple('FilterInfo', 'att op value')
RelInfo = namedtuple('RelInfo', 'src tgt comment')
rel_infos = (
RelInfo(
RelHalf('people', 'blogs', False, []),
RelHalf(
'blogs', 'owner', True,
[
FilterInfo('title', 'eq', 'owned by 11'),
],
),
'One to many',
),
RelInfo(
RelHalf('blogs', 'owner', True, []),
RelHalf(
'people', 'blogs', False,
[
FilterInfo('name', 'eq', 'one thing'),
]
),
'Many to one'
),
RelInfo(
RelHalf('people', 'articles_by_assoc', True, []),
RelHalf(
'articles_by_assoc', 'authors', True,
[
FilterInfo('title', 'eq', 'Collaborative one.')
]
),
'Many to many by association table'
),
RelInfo(
RelHalf('people', 'articles_by_proxy', True, []),
RelHalf(
'articles_by_obj', None, True,
[
FilterInfo('title', 'eq', 'Collaborative by obj one.')
]
),
'Many to many by association proxy'
),
)
class MyTestApp(webtest.TestApp):
def _check_status(self, status, res):
try:
super()._check_status(status, res)
except webtest.AppError as e:
errors = res.json_body.get('errors', [{}])
raise webtest.AppError(
'%s\n%s',
errors, res.json_body.get('traceback')
)
def setUpModule():
'''Create a test DB and import data.'''
# Create a new database somewhere in /tmp
global postgresql
global engine
postgresql = testing.postgresql.Postgresql(port=7654)
engine = create_engine(postgresql.url())
ltree.add_ltree_extension(engine)
DBSession.configure(bind=engine)
def tearDownModule():
'''Throw away test DB.'''
global postgresql
DBSession.close()
postgresql.stop()
def rels_doc_func(func, i, param):
src, tgt, comment = param[0]
return '{}:{}/{} ({})'.format(func.__name__, src.collection, src.rel, comment)
def make_ri(_type, _id):
return { 'type': _type, 'id': _id }
class DBTestBase(unittest.TestCase):
_test_app = None
@classmethod
def setUpClass(cls):
cls._test_app = cls.new_test_app()
def setUp(self):
Base.metadata.create_all(engine)
# Add some basic test data.
test_data.add_to_db(engine)
transaction.begin()
def tearDown(self):
transaction.abort()
Base.metadata.drop_all(engine)
def test_app(self, options=None):
if (options is None) and self._test_app:
# If there are no options and we have a cached app, return it.
return self._test_app
return self.new_test_app(options)
@staticmethod
def new_test_app(options=None):
'''Create a test app.'''
config_path = '{}/testing.ini'.format(parent_dir)
if options:
tmp_cfg = configparser.ConfigParser()
tmp_cfg.read(config_path)
tmp_cfg['app:main'].update(options or {})
config_path = '{}/tmp_testing.ini'.format(parent_dir)
with open(config_path, 'w') as tmp_file:
tmp_cfg.write(tmp_file)
with warnings.catch_warnings():
# Suppress SAWarning: about Property _jsonapi_id being replaced by
# Propery _jsonapi_id every time a new app is instantiated.
warnings.simplefilter(
"ignore",
category=SAWarning
)
app = get_app(config_path)
test_app = MyTestApp(app)
test_app._pj_app = app
if options:
os.remove(config_path)
return test_app
def evaluate_filter(self, att_val, op, test_val):
if op == 'eq':
return att_val == test_val
else:
raise Exception('Unkown filter op: {}'.format(op))
class TestTmp(DBTestBase):
'''To isolate tests so they can be run individually during development.'''
class TestPermissions(DBTestBase):
'''Test permission handling mechanisms.
'''
def test_get_alter_result_item(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see alice (people/1)
pj.view_classes[test_project.models.Person].register_permission_filter(
['read'],
['alter_result'],
lambda obj, *args, **kwargs: obj.object.name != 'alice',
)
# Shouldn't be allowed to see people/1 (alice)
test_app.get('/people/1', status=403)
# Should be able to see people/2 (bob)
test_app.get('/people/2')
def test_get_alter_result_item_individual_attributes(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def pfilter(obj, view, mask, *args, **kwargs):
if obj.object.name == 'alice':
return view.permission_object(subtract_attributes={'age',})
else:
return True
pj.view_classes[test_project.models.Person].register_permission_filter(
['get'],
['alter_result', ],
pfilter,
)
# Alice should have attribute 'name' but not 'age'.
alice = test_app.get('/people/1').json_body['data']
self.assertIn('name', alice['attributes'])
self.assertNotIn('age', alice['attributes'])
def test_get_alter_result_item_individual_rels(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def pfilter(obj, view, target, **kwargs):
if obj.object.name == 'alice' and target.name == 'posts':
return False
else:
return True
pj.view_classes[test_project.models.Person].register_permission_filter(
['get'],
['alter_result', ],
pfilter,
target_types=(Targets.relationship,)
)
# Alice should have relationship 'blogs' but not 'posts'.
alice = test_app.get('/people/1').json_body['data']
self.assertIn('blogs', alice['relationships'])
self.assertNotIn('posts', alice['relationships'])
def test_get_alter_result_item_rel_ids(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see blogs/1 (one of alice's 2 blogs)
pj.view_classes[test_project.models.Blog].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.id != 1,
)
alice = test_app.get('/people/1').json_body['data']
alice_blogs = alice['relationships']['blogs']['data']
self.assertIn({'type': 'blogs', 'id': '2'}, alice_blogs)
self.assertNotIn({'type': 'blogs', 'id': '1'}, alice_blogs)
def test_get_alter_result_item_included_items(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see blogs/1 (one of alice's 2 blogs)
pj.view_classes[test_project.models.Blog].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.id != 1,
)
included = test_app.get('/people/1?include=blogs').json_body['included']
included_blogs = {
item['id'] for item in included if item['type'] == 'blogs'
}
self.assertNotIn('1', included_blogs)
self.assertIn('2', included_blogs)
def test_get_alter_result_collection(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see alice (people/1)
pj.view_classes[test_project.models.Person].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.name != 'alice',
)
# Make sure we get the lowest ids with a filter.
ret = test_app.get('/people?filter[id:lt]=3').json_body
people = ret['data']
ppl_ids = { person['id'] for person in people }
self.assertNotIn('1', ppl_ids)
self.assertIn('2', ppl_ids)
def test_get_alter_result_collection_meta_info(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see alice (people/1)
pj.view_classes[test_project.models.Person].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.name != 'alice',
)
# Make sure we get the lowest ids with a filter.
res = test_app.get('/people?filter[id:lt]=3').json_body
meta = res['meta']
self.assertIn('people::1', meta['rejected']['objects'])
def test_related_get_alter_result(self):
'''
'related' link should fetch only allowed related resource(s).
'''
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to see blog with title 'main: alice' (aka blogs/1)
pj.view_classes[test_project.models.Blog].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.title != 'main: alice',
)
r = test_app.get('/people/1/blogs').json_body
data = r['data']
ids = {o['id'] for o in data}
self.assertIsInstance(data, list)
self.assertNotIn('1', ids)
# Not allowed to see alice (people/1)
pj.view_classes[test_project.models.Person].register_permission_filter(
['get'],
['alter_result', ],
lambda obj, *args, **kwargs: obj.object.name != 'alice',
)
r = test_app.get('/blogs/2/owner', status=403)
def test_post_alterreq_collection(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# Not allowed to post the name "forbidden"
def pfilter(obj, view, **kwargs):
return obj['attributes'].get('name') != 'forbidden'
pj.view_classes[test_project.models.Person].register_permission_filter(
['post'],
['alter_request'],
pfilter,
)
# Make sure we can't post the forbidden name.
test_app.post_json(
'/people',
{
'data': {
'type': 'people',
'attributes': {
'name': 'forbidden'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
# Make sure we can post some other name.
test_app.post_json(
'/people',
{
'data': {
'type': 'people',
'attributes': {
'name': 'allowed'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
)
def test_post_alterreq_collection_with_rels(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# def blogs_pfilter(obj, *args, **kwargs):
# return {'attributes': True, 'relationships': True}
# pj.view_classes[test_project.models.Blog].register_permission_filter(
# ['post'],
# ['alter_request'],
# blogs_pfilter,
# )
# /people: allow POST to all atts and to 3 relationships.
def people_pfilter(obj, view, target, **kwargs):
# if target.name == 'posts':
# print(obj['type'], obj['relationships']['posts'])
return view.permission_object(
True,
{'comments', 'articles_by_proxy', 'articles_by_assoc'}
)
pj.view_classes[test_project.models.Person].register_permission_filter(
['post'],
['alter_request'],
people_pfilter,
)
# /comments: allow PATCH (required to set 'comments.author') on all
# but comments/4.
pj.view_classes[test_project.models.Comment].register_permission_filter(
['patch'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'4'}
)
# /articles_by_assoc: allow POST (required to add people/new to
# 'articles_by_assoc.authors') on all but articles_by_assoc/11.
pj.view_classes[test_project.models.ArticleByAssoc].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'11'}
)
pj.view_classes[test_project.models.ArticleByObj].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'10'}
)
person_in = {
'data': {
'type': 'people',
'attributes': {
'name': 'post perms test'
},
'relationships': {
'posts': {
'data': [
{'type': 'posts', 'id': '20'},
{'type': 'posts', 'id': '21'}
]
},
'comments': {
'data': [
{'type': 'comments', 'id': '4'},
{'type': 'comments', 'id': '5'},
]
},
'articles_by_assoc': {
'data': [
{'type': 'articles_by_assoc', 'id': '10'},
{'type': 'articles_by_assoc', 'id': '11'},
]
},
'articles_by_proxy': {
'data': [
{'type': 'articles_by_obj', 'id': '10'},
{'type': 'articles_by_obj', 'id': '11'},
]
}
}
}
}
person_out = test_app.post_json(
'/people',
person_in,
headers={'Content-Type': 'application/vnd.api+json'},
).json_body['data']
rels = person_out['relationships']
self.assertEqual(len(rels['posts']['data']),0)
self.assertIn({'type': 'comments', 'id': '5'}, rels['comments']['data'])
self.assertNotIn({'type': 'comments', 'id': '4'}, rels['comments']['data'])
self.assertIn({'type': 'articles_by_assoc', 'id': '10'}, rels['articles_by_assoc']['data'])
self.assertNotIn({'type': 'articles_by_assoc', 'id': '11'}, rels['articles_by_assoc']['data'])
self.assertIn({'type': 'articles_by_obj', 'id': '11'}, rels['articles_by_proxy']['data'])
self.assertNotIn({'type': 'articles_by_obj', 'id': '10'}, rels['articles_by_proxy']['data'])
# Still need to test a to_one relationship. Posts has one of those.
# Switching to " for quoting so that the following can be copy/pasted as
# JSON in manual tests.
post_json = {
"data": {
"type": "posts",
"attributes": {
"title": "test"
},
"relationships": {
"author": {
"data": {"type": "people", "id": "10"}
},
"blog": {
"data": {"type": "blogs", "id": "10"}
}
}
}
}
# The Person permission filter defined above shouldn't allow us to POST
# post_json because we don't have permission to POST to Person.posts.
test_app.post_json(
'/posts',
post_json,
headers={'Content-Type': 'application/vnd.api+json'},
status=409 # this should probably be a different status.
)
# Replace the permission filter for Person - we need to be able to
# alter the Person.posts relationship.
pj.view_classes[test_project.models.Person].register_permission_filter(
['post'],
['alter_request'],
lambda *a, **kw: True,
)
post_out = test_app.post_json(
'/posts',
post_json,
headers={'Content-Type': 'application/vnd.api+json'},
)
def test_post_alterreq_relationship(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def blogs_pfilter(obj, *args, **kwargs):
if obj['id'] == '12':
return False
else:
return True
pj.view_classes[test_project.models.Blog].register_permission_filter(
['patch'],
['alter_request'],
blogs_pfilter,
)
# /people: allow POST to all atts and to 3 relationships.
def people_pfilter(obj, view, permission, **kwargs):
if permission == 'delete' and obj['id'] == '20':
return False
if permission == 'post' and obj['id'] == '12':
return False
return view.permission_object(
True,
{'blogs', 'articles_by_proxy', 'articles_by_assoc'}
)
pj.view_classes[test_project.models.Person].register_permission_filter(
['post', 'delete'],
['alter_request'],
people_pfilter,
)
# /articles_by_assoc: allow POST (required to add people/new to
# 'articles_by_assoc.authors') on all but articles_by_assoc/11.
pj.view_classes[test_project.models.ArticleByAssoc].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'11'}
)
pj.view_classes[test_project.models.ArticleByObj].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'10'}
)
# ONETOMANY relationship.
out = test_app.post_json(
'/people/1/relationships/blogs',
{
'data': [
{'type': 'blogs', 'id': '10'},
{'type': 'blogs', 'id': '11'},
{'type': 'blogs', 'id': '12'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
).json_body
# pprint.pprint(out)
# Now fetch people/1 and see if the new blogs are there.
p1 = test_app.get('/people/1').json_body['data']
blogs = p1['relationships']['blogs']['data']
# Should have left the original blogs in place.
self.assertIn({'type': 'blogs', 'id': '1'}, blogs)
# Should have added blogs/10 (previously no owner)
self.assertIn({'type': 'blogs', 'id': '10'}, blogs)
# Should have added blogs/11 (previously owned by 11)
self.assertIn({'type': 'blogs', 'id': '11'}, blogs)
# blogs/12 disallowed by blogs filter.
self.assertNotIn({'type': 'blogs', 'id': '12'}, blogs)
# MANYTOMANY relationship.
out = test_app.post_json(
'/people/1/relationships/articles_by_assoc',
{
'data': [
{'type': 'articles_by_assoc', 'id': '10'},
{'type': 'articles_by_assoc', 'id': '11'},
{'type': 'articles_by_assoc', 'id': '12'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
).json_body
p1 = test_app.get('/people/1').json_body['data']
articles = p1['relationships']['articles_by_assoc']['data']
# Should have added articles_by_assoc/10
self.assertIn({'type': 'articles_by_assoc', 'id': '10'}, articles)
# articles_by_assoc/11 disallowed by articles_by_assoc filter.
self.assertNotIn({'type': 'articles_by_assoc', 'id': '11'}, articles)
# articles_by_assoc/12 disallowed by people filter.
# self.assertNotIn({'type': 'articles_by_assoc', 'id': '12'}, articles)
def test_patch_alterreq_item_with_rels(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
# /people: allow PATCH to all atts and to 3 relationships.
def people_pfilter(obj, view, **kwargs):
return view.permission_object(
True,
{'comments', 'articles_by_proxy', 'articles_by_assoc'}
)
pj.view_classes[test_project.models.Person].register_permission_filter(
['patch'],
['alter_request'],
people_pfilter,
)
# /comments: allow PATCH (required to set 'comments.author') on all
# but comments/4.
def comments_pfilter(obj, **kwargs):
if obj['id'] == '4' and obj['relationships']['author']['data']['id'] == '1':
# We're not allowing people/1 to be the author of comments/4 for
# some reason.
return False
return True
pj.view_classes[test_project.models.Comment].register_permission_filter(
['patch'],
['alter_request'],
comments_pfilter
)
# /articles_by_assoc: allow POST (required to add people/new to
# 'articles_by_assoc.authors') on all but articles_by_assoc/11.
pj.view_classes[test_project.models.ArticleByAssoc].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'11'}
)
pj.view_classes[test_project.models.ArticleByObj].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'11'}
)
person_in = {
'data': {
'type': 'people',
'id': '1',
'attributes': {
'name': 'post perms test'
},
'relationships': {
'posts': {
'data': [
{'type': 'posts', 'id': '1'},
{'type': 'posts', 'id': '2'},
{'type': 'posts', 'id': '3'},
{'type': 'posts', 'id': '20'},
]
},
'comments': {
'data': [
{'type': 'comments', 'id': '1'},
{'type': 'comments', 'id': '4'},
{'type': 'comments', 'id': '5'},
]
},
'articles_by_assoc': {
'data': [
{'type': 'articles_by_assoc', 'id': '10'},
{'type': 'articles_by_assoc', 'id': '11'},
]
},
'articles_by_proxy': {
'data': [
{'type': 'articles_by_obj', 'id': '1'},
{'type': 'articles_by_obj', 'id': '10'},
{'type': 'articles_by_obj', 'id': '11'},
]
}
}
}
}
test_app.patch_json(
'/people/1',
person_in,
headers={'Content-Type': 'application/vnd.api+json'},
)
person_out = test_app.get('/people/1').json_body['data']
rels = person_out['relationships']
# pprint.pprint(rels['posts']['data'])
# pprint.pprint(rels['comments']['data'])
# pprint.pprint(rels['articles_by_assoc']['data'])
# pprint.pprint(rels['articles_by_proxy']['data'])
# Still need to test a to_one relationship. Blogs has one of those.
def blogs_pfilter(obj, view, **kwargs):
if obj['id'] == '13':
# Not allowed to change blogs/13 at all.
return False
if obj['id'] == '10':
# Not allowed to set owner of blogs/10 to people/13
if obj['relationships']['owner']['data'].get('id') == '13':
# print('people/13 not allowed as owner of 10')
return view.permission_object(True, {'posts',})
if obj['id'] == '11':
# Not allowed to set owner of blogs/11 to None.
if obj['relationships']['owner']['data'] is None:
return view.permission_object(True, {'posts',})
return True
pj.view_classes[test_project.models.Blog].register_permission_filter(
['patch'],
['alter_request'],
blogs_pfilter
)
blog = {
'data': {
'type': 'blogs', 'id': None,
'relationships': {
'owner': {
'data': None
}
}
}
}
blog_owner = blog['data']['relationships']['owner']
# /blogs/10 is owned by no-one. Change owner to people/11. Should
# Have permission for this one.
ppl11 = make_ri('people', '11')
blog['data']['id'] = '10'
blog_owner['data'] = ppl11
self.assertNotEqual(
test_app.get('/blogs/10').json_body['data']['relationships']['owner']['data'],
ppl11
)
test_app.patch_json(
'/blogs/10',
blog,
headers={'Content-Type': 'application/vnd.api+json'},
)
self.assertEqual(
test_app.get('/blogs/10').json_body['data']['relationships']['owner']['data'],
ppl11
)
# Not allowed to set blogs/10.owner to people/13 though.
ppl13 = make_ri('people', '13')
blog_owner['data'] = ppl13
test_app.patch_json(
'/blogs/10',
blog,
headers={'Content-Type': 'application/vnd.api+json'},
)
self.assertNotEqual(
test_app.get('/blogs/10').json_body['data']['relationships']['owner']['data'],
ppl13
)
# Should be able to switch ownership of blogs/11 to people/12
ppl12 = make_ri('people', '12')
blog['data']['id'] = '11'
blog_owner['data'] = ppl12
test_app.patch_json(
'/blogs/11',
blog,
headers={'Content-Type': 'application/vnd.api+json'},
)
self.assertEqual(
test_app.get('/blogs/11').json_body['data']['relationships']['owner']['data'],
ppl12
)
# but not to None
blog_owner['data'] = None
test_app.patch_json(
'/blogs/11',
blog,
headers={'Content-Type': 'application/vnd.api+json'},
)
self.assertNotEqual(
test_app.get('/blogs/11').json_body['data']['relationships']['owner']['data'],
None
)
# Shouldn't be allowed to patch blogs/13 at all.
blog['data']['id'] = '13'
test_app.patch_json(
'/blogs/13',
blog,
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
def test_patch_alterreq_relationships(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def people_pfilter(obj, view, **kwargs):
if obj['id'] == '1':
return False
if obj['id'] == '2':
return view.permission_object(True, False)
return True
pj.view_classes[test_project.models.Person].register_permission_filter(
['write'],
['alter_request'],
people_pfilter
)
def blogs_pfilter(obj, view, **kwargs):
if obj['id'] == '10':
# Not allowed to change blogs/10 at all.
return False
if obj['id'] == '11':
# Not allowed to set owner of blogs/11 to None.
if obj['relationships']['owner']['data'] is None:
return view.permission_object(True, {'posts',})
if obj['id'] == '12':
# Not allowed to set owner of blogs/12 to people/11
if obj['relationships']['owner']['data'].get('id') == '11':
return view.permission_object(True, {'posts',})
return True
pj.view_classes[test_project.models.Blog].register_permission_filter(
['write'],
['alter_request'],
blogs_pfilter
)
# ONETOMANY tests
# No permission to patch people/1 at all.
test_app.patch_json(
'/people/1/relationships/blogs',
{
'data': [
{'type': 'blogs', 'id': '10'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
# No permission to patch relationship of people/2.
test_app.patch_json(
'/people/2/relationships/blogs',
{
'data': [
{'type': 'blogs', 'id': '10'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
test_app.patch_json(
'/people/11/relationships/blogs',
{
'data': [
{'type': 'blogs', 'id': '10'},
{'type': 'blogs', 'id': '12'},
{'type': 'blogs', 'id': '13'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
blog_ids = [
b['id'] for b in
test_app.get('/people/11').json_body['data']['relationships']['blogs']['data']
]
# No permission to blogs/10
self.assertNotIn('10', blog_ids)
# No permission to set blogs/11.owner = people/11
self.assertNotIn('12', blog_ids)
# No permission to set blogs/11.owner = None
self.assertIn('11', blog_ids)
# Allowed to add blogs/13 :)
self.assertIn('13', blog_ids)
# MANYTOMANY tests
def articles_by_assoc_pfilter(obj, view, **kwargs):
if obj['id'] == '10':
# Not allowed to change articles_by_assoc/10 at all.
return False
if obj['id'] == '12':
# Not allowed to alter author of articles_by_assoc/12
return view.permission_object(True, False)
return True
pj.view_classes[test_project.models.ArticleByAssoc].register_permission_filter(
['post', 'delete'],
['alter_request'],
articles_by_assoc_pfilter
)
test_app.patch_json(
'/people/12/relationships/articles_by_assoc',
{
'data': [
{'type': 'articles_by_assoc', 'id': '10'},
{'type': 'articles_by_assoc', 'id': '1'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
article_ids = [
b['id'] for b in
test_app.get('/people/12').json_body['data']['relationships']['articles_by_assoc']['data']
]
# No permission to add 10
self.assertNotIn('10', article_ids)
# Permission to remove 13
self.assertNotIn('13', article_ids)
# No permission to remove 12
self.assertIn('12', article_ids)
# Permission to add 1
self.assertIn('1', article_ids)
def test_delete_alterreq_item(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def comments_pfilter(obj, view, **kwargs):
if obj['id'] == '1':
return True
else:
return False
pj.view_classes[test_project.models.Comment].register_permission_filter(
['delete'],
['alter_request'],
comments_pfilter,
)
test_app.delete('/comments/1')
test_app.delete('/comments/2', status=403)
def test_delete_alterreq_relationship(self):
test_app = self.test_app({})
pj = test_app._pj_app.pj
def blogs_pfilter(obj, *args, **kwargs):
if obj['id'] == '12':
return False
else:
return True
pj.view_classes[test_project.models.Blog].register_permission_filter(
['patch'],
['alter_request'],
blogs_pfilter,
)
# /people: allow POST to all atts and to 3 relationships.
def people_pfilter(obj, view, permission, target, **kwargs):
rels = obj['relationships']
if target.name == 'blogs' and rels['blogs']['data'][0]['id'] == '1':
return False
else:
return True
pj.view_classes[test_project.models.Person].register_permission_filter(
['post', 'delete'],
['alter_request'],
people_pfilter,
)
# /articles_by_assoc: allow POST (required to add people/new to
# 'articles_by_assoc.authors') on all but articles_by_assoc/11.
pj.view_classes[test_project.models.ArticleByAssoc].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'11'}
)
pj.view_classes[test_project.models.ArticleByObj].register_permission_filter(
['post'],
['alter_request'],
lambda obj, *args, **kwargs: obj['id'] not in {'10'}
)
# ONETOMANY relationship.
out = test_app.delete_json(
'/people/1/relationships/blogs',
{
'data': [
{'type': 'blogs', 'id': '1'},
{'type': 'blogs', 'id': '2'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
).json_body
# pprint.pprint(out)
post_ids = [
b['id'] for b in
test_app.get('/people/1').json_body['data']['relationships']['blogs']['data']
]
self.assertIn('1', post_ids)
self.assertNotIn('2', post_ids)
class TestRelationships(DBTestBase):
'''Test functioning of relationsips.
'''
# Test data convention:
#
# src:10 -> undef or []
# src:11 -> tgt:11 or [tgt:11]
# src:12 -> [tgt:12, tgt:13]
###############################################
# Relationship GET tests.
###############################################
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_object(self, src, tgt, comment):
'''Relationships key should be object with a defined structure.
The value of the relationships key MUST be an object (a “relationships
object”). Members of the relationships object (“relationships”)
represent references from the resource object in which it’s defined to
other resource objects.
Relationships links object should have 'self' and 'related' links.
'''
# Fetch item 1 from the collection
r = self.test_app().get('/{}/1'.format(src.collection))
item = r.json['data']
# Should have relationships key
self.assertIn('relationships', item)
rels = item['relationships']
# The named relationship should exist.
self.assertIn(src.rel, rels)
# Check the structure of the relationship object.
obj = rels[src.rel]
self.assertIn('links', obj)
self.assertIn('self', obj['links'])
self.assertTrue(obj['links']['self'].endswith(
'{}/1/relationships/{}'.format(src.collection, src.rel)
))
self.assertIn('related', obj['links'])
self.assertTrue(obj['links']['related'].endswith(
'{}/1/{}'.format(src.collection, src.rel)
))
self.assertIn('data', obj)
if tgt.many:
self.assertIsInstance(obj['data'], list)
self.assertIn('type', obj['data'][0])
self.assertIn('id', obj['data'][0])
else:
self.assertIsInstance(obj['data'], dict)
self.assertIn('type', obj['data'])
self.assertIn('id', obj['data'])
def test_rel_many_to_many_self(self):
"""
Should get items from a self referential many to many relationship.
"""
test_app = self.test_app()
data = test_app.get("/jobs/1").json_body['data']
minions = {ri['id'] for ri in data['relationships']['minions']['data']}
bosses = {ri['id'] for ri in data['relationships']['bosses']['data']}
self.assertEqual(minions, {'2', '3'})
self.assertEqual(bosses, set())
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_related_get(self, src, tgt, comment):
''''related' link should fetch related resource(s).
If present, a related resource link MUST reference a valid URL, even if
the relationship isn’t currently associated with any target resources.
'''
# Fetch item 1 from the collection
r = self.test_app().get('/{}/1'.format(src.collection))
item = r.json['data']
# Fetch the related url.
url = item['relationships'][src.rel]['links']['related']
data = self.test_app().get(url).json['data']
# Check that the returned data is of the expected type.
if tgt.many:
self.assertIsInstance(data, list)
for related_item in data:
self.assertEqual(related_item['type'], tgt.collection)
else:
self.assertIsInstance(data, dict)
self.assertEqual(data['type'], tgt.collection)
def test_rels_related_get_no_relationship(self):
"""Should fail to get an invalid relationship."""
self.test_app().get('/blogs/1/no_such_relationship',
status=400,
)
def test_rels_related_get_no_object(self):
"""Should fail if 'parent' doesn't exist."""
self.test_app().get('/blogs/99999/owner',
status=400,
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_resource_linkage(self, src, tgt, comment):
'''Appropriate related resource identifiers in relationship.
Resource linkage in a compound document allows a client to link together
all of the included resource objects without having to GET any URLs via
links.
Resource linkage MUST be represented as one of the following:
* null for empty to-one relationships.
* an empty array ([]) for empty to-many relationships.
* a single resource identifier object for non-empty to-one
relationships.
* an array of resource identifier objects for non-empty to-many
relationships.
'''
# Test data convention:
#
# src:10 -> None or []
# src:11 -> tgt:11 or [tgt:11]
# src:12 -> [tgt:12, tgt:13]
# We always need items 10 and 11 from the source collection.
reldata_with_none = self.test_app().get(
'/{}/10'.format(src.collection)
).json['data']['relationships'][src.rel]['data']
reldata_with_one = self.test_app().get(
'/{}/11'.format(src.collection)
).json['data']['relationships'][src.rel]['data']
if tgt.many:
# Empty to_many relationship should hold [].
self.assertEqual(reldata_with_none, [])
# Should be an array with one item.
self.assertEqual(
reldata_with_one[0],
{'type': tgt.collection, 'id': '11'}
)
# We need item 12 for a to_many relationship.
# Note that we sort the list of related items so that they are in a
# known order for later testing.
reldata_with_two = sorted(
self.test_app().get(
'/{}/12'.format(src.collection)
).json['data']['relationships'][src.rel]['data'],
key=lambda item: item['id']
)
# Should be an array with two items.
self.assertEqual(
reldata_with_two[0], {'type': tgt.collection, 'id': '12'}
)
self.assertEqual(
reldata_with_two[1], {'type': tgt.collection, 'id': '13'}
)
else:
# Empty to_one relationship should hold None.
self.assertIsNone(reldata_with_none)
# Otherwise a single item {type: tgt_type, id: 11}.
self.assertEqual(reldata_with_one, {'type': tgt.collection, 'id': '11'})
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_fetch_relationship_link(self, src, tgt, comment):
'''relationships links should return linkage information.
A server MUST support fetching relationship data for every relationship
URL provided as a self link as part of a relationship’s links object
The primary data in the response document MUST match the appropriate
value for resource linkage, as described above for relationship objects.
If [a to-one] relationship is empty, then a GET request to the
[relationship] URL would return:
"data": null
If [a to-many] relationship is empty, then a GET request to the
[relationship] URL would return:
"data": []
'''
for item_id in ['10', '11', '12']:
url = self.test_app().get(
'/{}/{}'.format(src.collection, item_id)
).json['data']['relationships'][src.rel]['links']['self']
reldata = self.test_app().get(url).json['data']
if tgt.many:
if item_id == '10':
self.assertEqual(reldata, [])
elif item_id == '11':
self.assertEqual(reldata[0]['type'], tgt.collection)
self.assertEqual(reldata[0]['id'], '11')
else:
reldata.sort(key=lambda item: item['id'])
self.assertEqual(reldata[0]['type'], tgt.collection)
self.assertEqual(reldata[0]['id'], '12')
self.assertEqual(reldata[1]['type'], tgt.collection)
self.assertEqual(reldata[1]['id'], '13')
else:
if item_id == '10':
self.assertIsNone(reldata)
elif item_id == '11':
self.assertEqual(reldata['type'], tgt.collection)
self.assertEqual(reldata['id'], '11')
else:
continue
def test_rels_fetch_not_found_relationship(self):
'''Should 404 when fetching a relationship that does not exist.
A server MUST return 404 Not Found when processing a request to fetch a
relationship link URL that does not exist.
'''
# Try to get the author of a non existent post.
r = self.test_app().get('/posts/1000/relationships/author', status=404)
# Try to get data about a non existing relationships
self.test_app().get('/posts/1/relationships/no_such_relationship',
status=404)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_filter(self, src, tgt, comment):
'''
'''
for filter in tgt.filters:
json = self.test_app().get(
'/{}?filter[{}.{}:{}]={}&include={}'.format(
src.collection,
src.rel,
filter.att,
filter.op,
filter.value,
src.rel,
)
).json
#included = json['included']
included = {
(inc['type'], inc['id']): inc for inc in json['included']
}
# There should be at least one match.
self.assertGreater(len(included), 0)
items = json['data']
# For each returned item, there should be at least one related
# item which matches the filter.
for item in items:
res_ids = item['relationships'][src.rel]['data']
self.assertIsNotNone(res_ids)
if not tgt.many:
res_ids = [res_ids]
found_match = False
for res_id in res_ids:
relitem = included[(res_id['type'], res_id['id'])]
found_match = self.evaluate_filter(
relitem['attributes'][filter.att],
filter.op,
filter.value
)
if found_match:
break
self.assertTrue(found_match)
###############################################
# Relationship POST tests.
###############################################
def test_rels_post_no_such_relationship(self):
"""Should fail to create an invalid relationship."""
created_id = self.test_app().post_json(
'/blogs',
{
'data': {
'type': 'blogs',
'attributes': {
'title': 'test'
},
'relationships': {
'no_such_relationship': {
'data': {'type': 'people', 'id': '1'}
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
)
def test_rels_post_relationship_no_data(self):
"Relationships mentioned in POSTs must have data."
created_id = self.test_app(
options = {
'pyramid_jsonapi.schema_validation': 'false'
}
).post_json(
'/blogs',
{
'data': {
'type': 'blogs',
'attributes': {
'title': 'test'
},
'relationships': {
'owner': {}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_rels_post_relationship_no_id(self):
"Relationship linkage in POST requests must have id."
created_id = self.test_app(
options = {
'pyramid_jsonapi.schema_validation': 'false'
}
).post_json(
'/blogs',
{
'data': {
'type': 'blogs',
'attributes': {
'title': 'test'
},
'relationships': {
'owner': {
'data': {'type': 'people'}
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_post_to_relationships(self, src, tgt, comment):
'''Should add items to a TOMANY relationship; 403 Error for TOONE.
If a client makes a POST request to a URL from a relationship link, the
server MUST add the specified members to the relationship unless they
are already present. If a given type and id is already in the
relationship, the server MUST NOT add it again.
'''
if not tgt.many:
# Cannot POST to TOONE relationship. 403 Error.
self.test_app(
options = {
'pyramid_jsonapi.schema_validation': 'false'
}
).post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{'type': tgt.collection, 'id': '11'},
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
return
# Add related items 12 and 13 to item 10 (has no related items).
self.test_app().post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{ 'type': tgt.collection, 'id': '12'},
{ 'type': tgt.collection, 'id': '13'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Make sure they are there.
rel_ids = {
rel_item['id'] for rel_item in
self.test_app().get(
'/{}/10/relationships/{}'.format(src.collection, src.rel)
).json['data']
}
self.assertEqual(rel_ids, {'12', '13'})
# Make sure adding relitem:12 again doesn't result in two relitem:12s
self.test_app().post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{ 'type': tgt.collection, 'id': '12'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
rel_ids = [
rel_item['id'] for rel_item in
self.test_app().get(
'/{}/10/relationships/{}'.format(src.collection, src.rel)
).json['data']
]
self.assertEqual(sorted(rel_ids), ['12', '13'])
# Make sure adding relitem:11 adds to the list, rather than replacing
# it.
self.test_app().post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{ 'type': tgt.collection, 'id': '11'},
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
rel_ids = [
rel_item['id'] for rel_item in
self.test_app().get(
'/{}/10/relationships/{}'.format(src.collection, src.rel)
).json['data']
]
self.assertEqual(sorted(rel_ids), ['11', '12', '13'])
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_post_item_with_related(self, src, tgt, comment):
'''Should add a new item with linkage to related resources.
If a relationship is provided in the relationships member of the
resource object, its value MUST be a relationship object with a data
member. The value of this key represents the linkage the new resource is
to have.
'''
# Add a new item related to relitem:12 and possibly relitem:13
reldata = {'type': tgt.collection, 'id': '12'}
if tgt.many:
reldata = [ reldata, {'type': tgt.collection, 'id': '13'} ]
item_id = self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': reldata
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'}
).json['data']['id']
# GET it back and check that relationship linkage is correct.
item = self.test_app().get(
'/{}/{}'.format(src.collection, item_id)
).json['data']
if tgt.many:
specified_related_ids = {'12', '13'}
found_related_ids = {
thing['id'] for thing in item['relationships'][src.rel]['data']
}
self.assertEqual(specified_related_ids, found_related_ids)
else:
self.assertEqual(item['relationships'][src.rel]['data']['id'], '12')
# Now attempt to add another item with malformed requests.
incorrect_type_data = { 'type': 'frogs', 'id': '12' }
no_id_data = { 'type': tgt.collection, 'id_typo': '12'}
# No data element in rel.
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'meta': 'should fail'
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
if tgt.many:
incorrect_type_data = [ incorrect_type_data ]
no_id_data = [ no_id_data ]
# Not an array.
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': { 'type': tgt.collection, 'id': '12'}
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
else:
# Data is an array of identifiers when it should be just one.
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': [
{ 'type': tgt.collection, 'id': '12'}
]
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
# Data malformed (not a resource identifier or array of them).
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': 'splat'
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
# Item with incorrect type.
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': incorrect_type_data
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
# Item with no id.
self.test_app().post_json(
'/{}'.format(src.collection),
{
'data': {
'type': src.collection,
'relationships': {
src.rel: {
'data': no_id_data
}<|fim▁hole|> headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_rels_post_relationships_nonexistent_relationship(self):
'''Should return 404 error (relationship not found).
'''
# Try to add people/1 to no_such_relationship.
self.test_app().post_json(
'/articles_by_assoc/2/relationships/no_such_relationship',
{
'data': [
{ 'type': 'people', 'id': '1'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_post_relationships_nonexistent_item(self, src, tgt, comment):
'''Should return HTTPFailedDependency (424).
'''
# Try to add tgt/99999 (doesn't exist) to src.rel
reldata = { 'type': tgt.collection, 'id': '99999'}
status = 403
if tgt.many:
reldata = [ reldata ]
status = 424
self.test_app().post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': reldata
},
headers={'Content-Type': 'application/vnd.api+json'},
status=status
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_spec_post_relationships_invalid_id(self, src, tgt, comments):
'''Should return HTTPBadRequest.
'''
if not tgt.many:
return
# Try to add item/splat to rel..
self.test_app().post_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{ 'type': tgt.collection, 'id': 'splat'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_rels_post_relationships_integrity_error(self):
'''Should return HTTPFailedDependency.
'''
# Try to add blog/1 to people/3 (db constraint precludes this)
self.test_app().post_json(
'/people/3/relationships/blogs',
{
'data': [
{ 'type': 'blogs', 'id': '1'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=424
)
###############################################
# Relationship PATCH tests.
###############################################
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_patch_resources_relationships(self, src, tgt, comment):
'''Should replace src.rel with new contents.
Any or all of a resource’s relationships MAY be included in the resource
object included in a PATCH request.
If a request does not include all of the relationships for a resource,
the server MUST interpret the missing relationships as if they were
included with their current values. It MUST NOT interpret them as null
or empty values.
If a relationship is provided in the relationships member of a resource
object in a PATCH request, its value MUST be a relationship object with
a data member. The relationship’s value will be replaced with the value
specified in this member.
'''
reldata = {'type': tgt.collection, 'id': '12'}
if tgt.many:
reldata = [ reldata, {'type': tgt.collection, 'id': '13'} ]
# PATCH src/10/rels/rel to be reldata
self.test_app().patch_json(
'/{}/10'.format(src.collection),
{
'data': {
'id': '10',
'type': src.collection,
'relationships': {
src.rel: {
'data': reldata
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Check that src.rel has the correct linkage.
src_item = self.test_app().get('/{}/10'.format(src.collection)).json['data']
if tgt.many:
for related_item in src_item['relationships'][src.rel]['data']:
self.assertEqual(related_item['type'], tgt.collection)
self.assertIn(related_item['id'], {'12', '13'})
else:
self.assertEqual(src_item['relationships'][src.rel]['data'], reldata)
# Now try PATCHing the relationship back to empty
if tgt.many:
reldata = []
else:
reldata = None
self.test_app().patch_json(
'/{}/10'.format(src.collection),
{
'data': {
'id': '10',
'type': src.collection,
'relationships': {
src.rel: {
'data': reldata
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
)
src_item = self.test_app().get('/{}/10'.format(src.collection)).json['data']
self.assertEqual(src_item['relationships'][src.rel]['data'], reldata)
# MUST be a relationship object with a data member
# Try without a data member...
self.test_app().patch_json(
'/{}/10'.format(src.collection),
{
'data': {
'id': '10',
'type': src.collection,
'relationships': {
src.rel: reldata
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_patch_relationships(self, src, tgt, comment):
'''Should update a relationship.
A server MUST respond to PATCH requests to a URL from a to-one
relationship link as described below.
The PATCH request MUST include a top-level member named data containing
one of:
* a resource identifier object corresponding to the new related
resource.
* null, to remove the relationship.
If a client makes a PATCH request to a URL from a to-many relationship
link, the server MUST either completely replace every member of the
relationship, return an appropriate error response if some resources can
not be found or accessed, or return a 403 Forbidden response if complete
replacement is not allowed by the server.
'''
if tgt.many:
new_reldata = [
{ 'type': tgt.collection, 'id': '12'},
{ 'type': tgt.collection, 'id': '13'}
]
new_empty = []
else:
new_reldata = { 'type': tgt.collection, 'id': '12'}
new_empty = None
# src:11 should be related to tgt:11. Update the relationship.
self.test_app().patch_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': new_reldata
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Check that the change went through
fetched_reldata = self.test_app().get(
'/{}/10/relationships/{}'.format(src.collection, src.rel)
).json['data']
if tgt.many:
expected_length = 2
expected_ids = {'12', '13'}
else:
# Wrap to_one results in an array to make the following code DRY.
fetched_reldata = [ fetched_reldata ]
expected_length = 1
expected_ids = {'12'}
fetched_reldata.sort(key=lambda item: item['id'])
self.assertEqual(len(fetched_reldata), expected_length)
for relitem in fetched_reldata:
self.assertEqual(relitem['type'], tgt.collection)
self.assertIn(relitem['id'], expected_ids)
# Update the relationship to be empty.
self.test_app().patch_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': new_empty
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Check that it's empty.
self.assertEqual(
self.test_app().get(
'/{}/10/relationships/{}'.format(src.collection, src.rel)
).json['data'],
new_empty
)
def test_rels_patch_relationships_nonexistent_relationship(self):
'''Should return 404 error (relationship not found).
'''
# Try set people/1 on no_such_relationship.
self.test_app().patch_json(
'/articles_by_assoc/2/relationships/no_such_relationship',
{
'data': [
{ 'type': 'people', 'id': '1'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_patch_relationships_nonexistent_item(self, src, tgt, comment):
'''Should return HTTPFailedDependency.
'''
reldata = { 'type': tgt.collection, 'id': '99999' }
if tgt.many:
reldata = [ reldata ]
self.test_app().patch_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': reldata
},
headers={'Content-Type': 'application/vnd.api+json'},
status=424
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_patch_relationships_invalid_id(self, src, tgt, comment):
'''Should return HTTPBadRequest.
'''
reldata = { 'type': tgt.collection, 'id': 'splat' }
if tgt.many:
reldata = [ reldata ]
self.test_app().patch_json(
'/{}/10/relationships/{}'.format(src.collection, src.rel),
{
'data': reldata
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_rels_patch_relationships_integrity_error(self):
'''Should return HTTPFailedDependency.
'''
# Try to add blog/1 to people/3 (db constraint precludes this)
self.test_app().patch_json(
'/people/3/relationships/blogs',
{
'data': [
{ 'type': 'blogs', 'id': '1'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=424
)
# and the other way round
self.test_app().patch_json(
'/blogs/1/relationships/owner',
{
'data': { 'type': 'people', 'id': '3'}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=424
)
###############################################
# Relationship DELETE tests.
###############################################
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_delete_relationships(self, src, tgt, comment):
'''Should remove items from relationship.
If the client makes a DELETE request to a URL from a relationship link
the server MUST delete the specified members from the relationship or
return a 403 Forbidden response. If all of the specified resources are
able to be removed from, or are already missing from, the relationship
then the server MUST return a successful response
'''
if not tgt.many:
# DELETEing from a to_one relationship is not allowed.
self.test_app().delete(
'/{}/11/relationships/{}'.format(src.collection, src.rel),
status=403
)
return
# Attempt to delete tgt:13 from src:12
self.test_app().delete_json(
'/{}/12/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{'type': tgt.collection, 'id': '13'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Test that tgt:13 is no longer in the relationship.
self.assertEqual(
{'12'},
{
item['id'] for item in
self.test_app().get(
'/{}/12/relationships/{}'.format(src.collection, src.rel)
).json['data']
}
)
# Try to DELETE tgt:13 from relationship again. Should return success.
self.test_app().delete_json(
'/{}/12/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{'type': tgt.collection, 'id': '13'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
)
self.assertEqual(
{'12'},
{
item['id'] for item in
self.test_app().get(
'/{}/12/relationships/{}'.format(src.collection, src.rel)
).json['data']
}
)
def test_rels_delete_relationships_nonexistent_relationship(self):
'''Should return 404 error (relationship not found).
'''
# Delete people/1 from no_such_relationship.
self.test_app().delete_json(
'/articles_by_assoc/2/relationships/no_such_relationship',
{
'data': [
{ 'type': 'people', 'id': '1'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_delete_relationships_nonexistent_item(self, src, tgt, comment):
'''Should return HTTPFailedDependency.
'''
if not tgt.many:
return
self.test_app().delete_json(
'/{}/11/relationships/{}'.format(src.collection, src.rel),
{
'data': [ { 'type': tgt.collection, 'id': '99999' } ]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=424
)
@parameterized.expand(rel_infos, doc_func=rels_doc_func)
def test_rels_delete_relationships_invalid_id(self, src, tgt, comment):
'''Should return HTTPBadRequest.
'''
if not tgt.many:
return
# Try to delete tgt:splat from src:11.
self.test_app().delete_json(
'/{}/11/relationships/{}'.format(src.collection, src.rel),
{
'data': [
{ 'type': tgt.collection, 'id': 'splat'}
]
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_adjacancy_list(self):
'''Should correctly identify parent and children for TreeNode.
'''
top = self.test_app().get('/treenodes/1').json
top_1 = self.test_app().get('/treenodes/2').json
# top should have no parent.
self.assertIsNone(top['data']['relationships']['parent']['data'])
# top should have multiple children.
self.assertIsInstance(top['data']['relationships']['children']['data'], list)
# top_1 should have top as a parent.
self.assertEqual(
top_1['data']['relationships']['parent']['data'],
{'type': 'treenodes', 'id': '1'}
)
# top_1 should have 2 children.
self.assertIsInstance(top_1['data']['relationships']['children']['data'], list)
self.assertEqual(len(top_1['data']['relationships']['children']['data']), 2)
class TestSpec(DBTestBase):
'''Test compliance against jsonapi spec.
http://jsonapi.org/format/
'''
###############################################
# GET tests.
###############################################
def test_spec_server_content_type(self):
'''Response should have correct content type.
Servers MUST send all JSON API data in response documents with the
header Content-Type: application/vnd.api+json without any media type
parameters.
'''
# Fetch a representative page
r = self.test_app().get('/people')
self.assertEqual(r.content_type, 'application/vnd.api+json')
def test_spec_incorrect_client_content_type(self):
'''Server should return error if we send media type parameters.
Servers MUST respond with a 415 Unsupported Media Type status code if a
request specifies the header Content-Type: application/vnd.api+json with
any media type parameters.
'''
r = self.test_app().get(
'/people',
headers={ 'Content-Type': 'application/vnd.api+json; param=val' },
status=415,
)
def test_spec_accept_not_acceptable(self):
'''Server should respond with 406 if all jsonapi media types have parameters.
Servers MUST respond with a 406 Not Acceptable status code if a
request's Accept header contains the JSON API media type and all
instances of that media type are modified with media type parameters.
'''
# Should work with correct accepts header.
r = self.test_app().get(
'/people',
headers={ 'Accept': 'application/vnd.api+json' },
)
# 406 with one incorrect type.
r = self.test_app().get(
'/people',
headers={ 'Accept': 'application/vnd.api+json; param=val' },
status=406,
)
# 406 with more than one type but none without params.
r = self.test_app().get(
'/people',
headers={ 'Accept': 'application/vnd.api+json; param=val,' +
'application/vnd.api+json; param2=val2' },
status=406,
)
def test_spec_toplevel_must(self):
'''Server response must have one of data, errors or meta.
A JSON object MUST be at the root of every JSON API request and response
containing data.
A document MUST contain at least one of the following top-level members:
* data: the document's “primary data”
* errors: an array of error objects
* meta: a meta object that contains non-standard meta-information.
'''
# Should be object with data member.
r = self.test_app().get('/people')
self.assertIn('data', r.json)
# Should also have a meta member.
self.assertIn('meta', r.json)
# Should be object with an array of errors.
r = self.test_app().get(
'/people',
headers={ 'Content-Type': 'application/vnd.api+json; param=val' },
status=415,
)
self.assertIn('errors', r.json)
self.assertIsInstance(r.json['errors'], list)
def test_spec_get_no_such_item(self):
'''Should fail to get non-existent comments/99999
A server MUST respond with 404 Not Found when processing a request
to fetch a single resource that does not exist
'''
# Get comments/99999
self.test_app().get('/comments/99999', status=404)
def test_spec_get_invalid_item(self):
'''Should fail to get invalid item comments/cat
A server MUST respond with 404 Not Found when processing a request
to fetch a single resource that does not exist
'''
# Get comments/cat
self.test_app().get('/comments/cat', status=404)
def test_spec_get_primary_data_empty(self):
'''Should return an empty list of results.
Primary data MUST be either:
* ...or an empty array ([])
A logical collection of resources MUST be represented as an array, even
if it... is empty.
'''
r = self.test_app().get('/people?filter[name:eq]=doesnotexist')
self.assertEqual(len(r.json['data']), 0)
def test_spec_get_primary_data_array(self):
'''Should return an array of resource objects.
Primary data MUST be either:
* an array of resource objects, an array of resource identifier
objects, or an empty array ([]), for requests that target resource
collections
'''
# Data should be an array of person resource objects or identifiers.
r = self.test_app().get('/people')
self.assertIn('data', r.json)
self.assertIsInstance(r.json['data'], list)
item = r.json['data'][0]
def test_spec_get_primary_data_array_of_one(self):
'''Should return an array of one resource object.
A logical collection of resources MUST be represented as an array, even
if it only contains one item...
'''
r = self.test_app().get('/people?page[limit]=1')
self.assertIn('data', r.json)
self.assertIsInstance(r.json['data'], list)
self.assertEqual(len(r.json['data']), 1)
def test_spec_get_primary_data_single(self):
'''Should return a single resource object.
Primary data MUST be either:
* a single resource object, a single resource identifier object, or
null, for requests that target single resources
'''
# Find the id of alice.
r = self.test_app().get('/people?filter[name:eq]=alice')
item = r.json['data'][0]
self.assertEqual(item['attributes']['name'], 'alice')
alice_id = item['id']
# Now get alice object.
r = self.test_app().get('/people/' + alice_id)
alice = r.json['data']
self.assertEqual(alice['attributes']['name'], 'alice')
def test_spec_resource_object_must(self):
'''Resource object should have at least id and type.
A resource object MUST contain at least the following top-level members:
* id
* type
The values of the id and type members MUST be strings.
'''
r = self.test_app().get('/people?page[limit]=1')
item = r.json['data'][0]
# item must have at least a type and id.
self.assertEqual(item['type'], 'people')
self.assertIn('id', item)
self.assertIsInstance(item['type'], str)
self.assertIsInstance(item['id'], str)
def test_spec_resource_object_should(self):
'''Fetched resource should have attributes, relationships, links, meta.
a resource object MAY contain any of these top-level members:
* attributes: an attributes object representing some of the
resource’s data.
* relationships: a relationships object describing relationships
between the resource and other JSON API resources.
* links: a links object containing links related to the resource.
* meta: a meta object containing non-standard meta-information about
a resource that can not be represented as an attribute or
relationship.
'''
r = self.test_app().get('/people?page[limit]=1')
item = r.json['data'][0]
self.assertIn('attributes', item)
#self.assertIn('relationships', item)
self.assertIn('links', item)
#self.assertIn('meta', item)
def test_spec_type_id_identify_resource(self):
'''Using type and id should fetch a single resource.
Within a given API, each resource object’s type and id pair MUST
identify a single, unique resource.
'''
# Find the id of alice.
r = self.test_app().get('/people?filter[name:eq]=alice')
item = r.json['data'][0]
self.assertEqual(item['attributes']['name'], 'alice')
alice_id = item['id']
# Search for alice by id. We should get one result whose name is alice.
r = self.test_app().get('/people?filter[id:eq]={}'.format(alice_id))
self.assertEqual(len(r.json['data']), 1)
item = r.json['data'][0]
self.assertEqual(item['id'], alice_id)
self.assertEqual(item['attributes']['name'], 'alice')
def test_spec_attributes(self):
'''attributes key should be an object.
The value of the attributes key MUST be an object (an “attributes
object”). Members of the attributes object (“attributes”) represent
information about the resource object in which it’s defined.
'''
# Fetch a single post.
r = self.test_app().get('/posts?page[limit]=1')
item = r.json['data'][0]
# Check attributes.
self.assertIn('attributes', item)
atts = item['attributes']
self.assertIn('title', atts)
self.assertIn('content', atts)
self.assertIn('published_at', atts)
def test_spec_no_foreign_keys(self):
'''No foreign keys in attributes.
Although has-one foreign keys (e.g. author_id) are often stored
internally alongside other information to be represented in a resource
object, these keys SHOULD NOT appear as attributes.
'''
# posts have author_id and blog_id as has-one foreign keys. Check that
# they don't make it into the JSON representation (they should be in
# relationships instead).
# Fetch a single post.
r = self.test_app().get('/posts?page[limit]=1')
item = r.json['data'][0]
# Check for foreign keys.
self.assertNotIn('author_id', item['attributes'])
self.assertNotIn('blog_id', item['attributes'])
def test_spec_links_self(self):
''''self' link should fetch same object.
The optional links member within each resource object contains links
related to the resource.
If present, this links object MAY contain a self link that identifies
the resource represented by the resource object.
A server MUST respond to a GET request to the specified URL with a
response that includes the resource as the primary data.
'''
person = self.test_app().get('/people/1').json['data']
# Make sure we got the expected person.
self.assertEqual(person['type'], 'people')
self.assertEqual(person['id'], '1')
# Now fetch the self link.
person_again = self.test_app().get(person['links']['self']).json['data']
# Make sure we got the same person.
self.assertEqual(person_again['type'], 'people')
self.assertEqual(person_again['id'], '1')
def test_spec_included_array(self):
'''Included resources should be in an array under 'included' member.
In a compound document, all included resources MUST be represented as an
array of resource objects in a top-level included member.
'''
person = self.test_app().get('/people/1?include=blogs').json
self.assertIsInstance(person['included'], list)
# Each item in the list should be a resource object: we'll look for
# type, id and attributes.
for blog in person['included']:
self.assertIn('id', blog)
self.assertEqual(blog['type'], 'blogs')
self.assertIn('attributes', blog)
def test_spec_bad_include(self):
'''Should 400 error on attempt to fetch non existent relationship path.
If a server is unable to identify a relationship path or does not
support inclusion of resources from a path, it MUST respond with 400 Bad
Request.
'''
# Try to include a relationship that doesn't exist.
r = self.test_app().get('/people/1?include=frogs', status=400)
def test_spec_nested_include(self):
'''Should return includes for nested resources.
In order to request resources related to other resources, a
dot-separated path for each relationship name can be specified:
* GET /articles/1?include=comments.author
'''
r = self.test_app().get('/people/1?include=comments.author')
people_seen = set()
types_expected = {'people', 'comments'}
types_seen = set()
for item in r.json['included']:
# Shouldn't see any types other than comments and people.
self.assertIn(item['type'], types_expected)
types_seen.add(item['type'])
# We should only see people 1, and only once.
if item['type'] == 'people':
self.assertNotIn(item['id'], people_seen)
people_seen.add(item['id'])
# We should have seen at least one of each type.
self.assertIn('people', types_seen)
self.assertIn('comments', types_seen)
def test_spec_multiple_include(self):
'''Should return multiple related resource types.
Multiple related resources can be requested in a comma-separated list:
* GET /articles/1?include=author,comments.author
'''
# TODO(Colin) implement
def test_spec_compound_full_linkage(self):
'''All included resources should be referenced by a resource link.
Compound documents require "full linkage", meaning that every included
resource MUST be identified by at least one resource identifier object
in the same document. These resource identifier objects could either be
primary data or represent resource linkage contained within primary or
included resources.
'''
# get a person with included blogs and comments.
person = self.test_app().get('/people/1?include=blogs,comments').json
# Find all the resource identifiers.
rids = set()
for rel in person['data']['relationships'].values():
if isinstance(rel['data'], list):
for item in rel['data']:
rids.add((item['type'], item['id']))
else:
rids.add((rel['data']['type'], rel['data']['id']))
# Every included item should have an identifier somewhere.
for item in person['included']:
type_ = item['type']
id_ = item['id']
self.assertIn((type_, id_), rids)
def test_spec_compound_no_linkage_sparse(self):
'''Included resources not referenced if referencing field not included.
The only exception to the full linkage requirement is when relationship
fields that would otherwise contain linkage data are excluded via sparse
fieldsets.
'''
person = self.test_app().get(
'/people/1?include=blogs&fields[people]=name,comments'
).json
# Find all the resource identifiers.
rids = set()
for rel in person['data']['relationships'].values():
for item in rel['data']:
rids.add((item['type'], item['id']))
self.assertGreater(len(person['included']), 0)
for blog in person['included']:
self.assertEqual(blog['type'], 'blogs')
def test_spec_compound_unique_resources(self):
'''Each resource object should appear only once.
A compound document MUST NOT include more than one resource object for
each type and id pair.
'''
# get some people with included blogs and comments.
people = self.test_app().get('/people?include=blogs,comments').json
# Check that each resource only appears once.
seen = set()
# Add the main resource objects.
for person in people['data']:
self.assertNotIn((person['type'], person['id']), seen)
seen.add((person['type'], person['id']))
# Check the included resources.
for obj in people['included']:
self.assertNotIn((obj['type'], obj['id']), seen)
seen.add((obj['type'], obj['id']))
def test_spec_links(self):
'''Links should be an object with URL strings.
Where specified, a links member can be used to represent links. The
value of each links member MUST be an object (a "links object").
Each member of a links object is a “link”. A link MUST be represented as
either:
* a string containing the link’s URL.
* an object ("link object") which can contain the following members:
* href: a string containing the link’s URL.
* meta: a meta object containing non-standard meta-information
about the link.
Note: only URL string links are currently generated by jsonapi.
'''
links = self.test_app().get('/people?pj_include_count=1').json['links']
self.assertIsInstance(links['self'], str)
self.assertIsInstance(links['first'], str)
self.assertIsInstance(links['last'], str)
def test_spec_fetch_non_existent(self):
'''Should 404 when fetching non existent resource.
A server MUST respond with 404 Not Found when processing a request to
fetch a single resource that does not exist,
'''
r = self.test_app().get('/people/1000', status=404)
def test_spec_fetch_non_existent_related(self):
'''Should return primary data of null, not 404.
null is only an appropriate response when the requested URL is one that
might correspond to a single resource, but doesn’t currently.
'''
data = self.test_app().get('/comments/5/author').json['data']
self.assertIsNone(data)
def test_spec_sparse_fields(self):
'''Should return only requested fields.
A client MAY request that an endpoint return only specific fields in the
response on a per-type basis by including a fields[TYPE] parameter.
The value of the fields parameter MUST be a comma-separated (U+002C
COMMA, ",") list that refers to the name(s) of the fields to be
returned.
If a client requests a restricted set of fields for a given resource
type, an endpoint MUST NOT include additional fields in resource objects
of that type in its response.
'''
# Ask for just the title, content and author fields of a post.
r = self.test_app().get('/posts/1?fields[posts]=title,content,author')
data = r.json['data']
atts = data['attributes']
self.assertEqual(len(atts), 2)
self.assertIn('title', atts)
self.assertIn('content', atts)
rels = data['relationships']
self.assertEqual(len(rels), 1)
self.assertIn('author', rels)
def test_spec_empty_fields(self):
"""should return no attributes."""
person = self.test_app().get(
'/people?fields[people]='
).json
self.assertEqual(len(person['data'][0]['attributes']), 0)
def test_spec_single_sort(self):
'''Should return collection sorted by correct field.
An endpoint MAY support requests to sort the primary data with a sort
query parameter. The value for sort MUST represent sort fields.
* GET /people?sort=age
'''
data = self.test_app().get('/posts?sort=content').json['data']
prev = ''
for item in data:
self.assertGreaterEqual(item['attributes']['content'], prev)
prev = item['attributes']['content']
def test_spec_related_sort(self):
'''Should return collection sorted by related field.
Note: It is recommended that dot-separated (U+002E FULL-STOP, “.”) sort
fields be used to request sorting based upon relationship attributes.
For example, a sort field of author.name could be used to request that
the primary data be sorted based upon the name attribute of the author
relationship.
'''
res = self.test_app().get('/posts?sort=author.name')
data = res.json['data']
prev = ''
for item in data:
# author_name is a hybrid attribute that just happens to have
# author.name in it.
self.assertGreaterEqual(item['attributes']['author_name'], prev)
prev = item['attributes']['author_name']
def test_spec_multiple_sort(self):
'''Should return collection sorted by multiple fields, applied in order.
An endpoint MAY support multiple sort fields by allowing comma-separated
(U+002C COMMA, ",") sort fields. Sort fields SHOULD be applied in the
order specified.
* GET /people?sort=age,name
'''
data = self.test_app().get('/posts?sort=content,id').json['data']
prev_content = ''
prev_id = 0
for item in data:
self.assertGreaterEqual(
item['attributes']['content'],
prev_content
)
if item['attributes']['content'] != prev_content:
prev_id = 0
self.assertGreaterEqual(int(item['id']), prev_id)
prev_content = item['attributes']['content']
prev_id = int(item['id'])
def test_spec_descending_sort(self):
'''Should return results sorted by field in reverse order.
The sort order for each sort field MUST be ascending unless it is
prefixed with a minus (U+002D HYPHEN-MINUS, "-"), in which case it MUST
be descending.
* GET /articles?sort=-created,title
'''
data = self.test_app().get('/posts?sort=-content').json['data']
prev = 'zzz'
for item in data:
self.assertLessEqual(item['attributes']['content'], prev)
prev = item['attributes']['content']
# TODO(Colin) repeat sort tests for other collection returning endpoints,
# because: Note: This section applies to any endpoint that responds with a
# resource collection as primary data, regardless of the request type
def test_spec_pagination_links(self):
'''Should provide correct pagination links.
A server MAY provide links to traverse a paginated data set ("pagination
links").
Pagination links MUST appear in the links object that corresponds to a
collection. To paginate the primary data, supply pagination links in the
top-level links object. To paginate an included collection returned in a
compound document, supply pagination links in the corresponding links
object.
The following keys MUST be used for pagination links:
* first: the first page of data
* last: the last page of data
* prev: the previous page of data
* next: the next page of data
'''
json = self.test_app().get(
'/posts?pj_include_count=1&page[limit]=2&page[offset]=2'
).json
self.assertEqual(len(json['data']), 2)
self.assertIn('first', json['links'])
self.assertIn('last', json['links'])
self.assertIn('prev', json['links'])
self.assertIn('next', json['links'])
def test_spec_pagination_unavailable_links(self):
'''Next page link should not be available
Keys MUST either be omitted or have a null value to indicate that a
particular link is unavailable.
'''
r = self.test_app().get('/posts?pj_include_count=1&page[limit]=1')
available = r.json['meta']['results']['available']
json = self.test_app().get(
'/posts?pj_include_count=1&page[limit]=2&page[offset]=' + str(available - 2)
).json
self.assertEqual(len(json['data']), 2)
self.assertNotIn('next', json['links'])
def test_spec_negative_offset(self):
"""Offset must not be negative"""
self.test_app().get('/posts?page[offset]=-1', status=400)
def test_spec_negative_limit(self):
"""Limit must not be negative"""
self.test_app().get('/posts?page[limit]=-1', status=400)
def test_spec_pagination_order(self):
'''Pages (and results) should order restults as per order param.
Concepts of order, as expressed in the naming of pagination links, MUST
remain consistent with JSON API’s sorting rules.
'''
data = self.test_app().get(
'/posts?page[limit]=4&sort=content&fields[posts]=content'
).json['data']
self.assertEqual(len(data), 4)
prev = ''
for item in data:
self.assertGreaterEqual(item['attributes']['content'], prev)
prev = item['attributes']['content']
# TODO(Colin) repeat sort tests for other collection returning endpoints,
# because: Note: This section applies to any endpoint that responds with a
# resource collection as primary data, regardless of the request type
def test_spec_filterop_eq(self):
'''Should return collection with just the alice people object.
The filter query parameter is reserved for filtering data. Servers and
clients SHOULD use this key for filtering operations.
'''
data = self.test_app().get('/people?filter[name:eq]=alice').json['data']
self.assertEqual(len(data), 1)
self.assertEqual(data[0]['type'], 'people')
self.assertEqual(data[0]['attributes']['name'], 'alice')
def test_spec_filterop_ne(self):
'''Should return collection of people whose name is not alice.'''
data = self.test_app().get('/people?filter[name:ne]=alice').json['data']
for item in data:
try:
errors = item['meta']['errors']
except KeyError:
self.assertNotEqual('alice', item['attributes']['name'])
def test_spec_filterop_startswith(self):
'''Should return collection where titles start with "post1".'''
data = self.test_app().get(
'/posts?filter[title:startswith]=post1'
).json['data']
for item in data:
self.assertTrue(item['attributes']['title'].startswith('post1'))
def test_spec_filterop_endswith(self):
'''Should return collection where titles end with "main".'''
data = self.test_app().get(
'/posts?filter[title:endswith]=main'
).json['data']
for item in data:
self.assertTrue(item['attributes']['title'].endswith('main'))
def test_spec_filterop_contains(self):
'''Should return collection where titles contain "bob".'''
data = self.test_app().get(
'/posts?filter[title:contains]=bob'
).json['data']
for item in data:
self.assertTrue('bob' in item['attributes']['title'])
def test_spec_filterop_lt(self):
'''Should return posts with published_at less than 2015-01-03.'''
data = self.test_app().get(
'/posts?filter[published_at:lt]=2015-01-03'
).json['data']
ref_date = datetime.datetime(2015,1,3)
for item in data:
#TODO(Colin) investigate more robust way of parsing date.
date = datetime.datetime.strptime(
item['attributes']['published_at'],
"%Y-%m-%dT%H:%M:%S"
)
self.assertLess(date, ref_date)
def test_spec_filterop_gt(self):
'''Should return posts with published_at greater than 2015-01-03.'''
data = self.test_app().get(
'/posts?filter[published_at:gt]=2015-01-03'
).json['data']
ref_date = datetime.datetime(2015,1,3)
for item in data:
#TODO(Colin) investigate more robust way of parsing date.
date = datetime.datetime.strptime(
item['attributes']['published_at'],
"%Y-%m-%dT%H:%M:%S"
)
self.assertGreater(date, ref_date)
def test_spec_filterop_le(self):
'''Should return posts with published_at <= 2015-01-03.'''
data = self.test_app().get(
'/posts?filter[published_at:le]=2015-01-03'
).json['data']
ref_date = datetime.datetime(2015,1,3)
for item in data:
#TODO(Colin) investigate more robust way of parsing date.
date = datetime.datetime.strptime(
item['attributes']['published_at'],
"%Y-%m-%dT%H:%M:%S"
)
self.assertLessEqual(date, ref_date)
def test_spec_filterop_ge(self):
'''Should return posts with published_at >= 2015-01-03.'''
data = self.test_app().get(
'/posts?filter[published_at:ge]=2015-01-03'
).json['data']
ref_date = datetime.datetime(2015,1,3)
for item in data:
#TODO(Colin) investigate more robust way of parsing date.
date = datetime.datetime.strptime(
item['attributes']['published_at'],
"%Y-%m-%dT%H:%M:%S"
)
self.assertGreaterEqual(date, ref_date)
def test_spec_filterop_like(self):
'''Should return collection where content matches "*thing*".'''
data = self.test_app().get(
'/posts?filter[content:like]=*thing*'
).json['data']
for item in data:
self.assertTrue('thing' in item['attributes']['content'])
def test_spec_filterop_ilike(self):
'''Should return collection where content case insensitive matches "*thing*".'''
data = self.test_app().get(
'/posts?filter[content:ilike]=*THING*'
).json['data']
for item in data:
self.assertTrue('thing' in item['attributes']['content'])
def test_spec_filterop_json_contains(self):
'''Should return collection where json_content contains {"b": 2}.'''
data = self.test_app().get(
'/posts?filter[json_content:contains]={"b": 2}'
).json['data']
for item in data:
self.assertIn('b', item['attributes']['json_content'])
def test_spec_filterop_json_contained_by(self):
'''Should return collection where json_content contained by expression.'''
containing_expr = '{"a":1, "b": 2, "c": 3}'
containing_json = json.loads(containing_expr)
data = self.test_app().get(
'/posts?filter[json_content:contained_by]={}'.format(containing_expr)
).json['data']
for item in data:
for key in item['attributes']['json_content']:
self.assertIn(key, containing_json)
def test_spec_filter_related_property(self):
'''Should return collection of posts with author.name=alice.'''
data = self.test_app().get('/posts?filter[author.name:eq]=alice').json['data']
for item in data:
self.assertEqual(item['attributes']['author_name'], 'alice')
###############################################
# POST tests.
###############################################
def test_spec_post_invalid_json(self):
'''Invalid json should raise an error.'''
# Send garbage json
self.test_app().post(
'/people',
'{,,,}',
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_spec_post_no_data_attribute(self):
'''Missing data attribute in json should raise an error.'''
# Send minimal json with no data attribute
self.test_app().post(
'/people',
'{"meta": {}}',
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_spec_post_data_not_item(self):
'''Missing data attribute in json should raise an error.'''
# Send minimal json with no data attribute
self.test_app().post(
'/people',
'{"data": []}',
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_spec_post_collection(self):
'''Should create a new person object.'''
# Make sure there is no test person.
data = self.test_app().get('/people?filter[name:eq]=test').json['data']
self.assertEqual(len(data),0)
# Try adding a test person.
self.test_app().post_json(
'/people',
{
'data': {
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'}
)
# Make sure they are there.
data = self.test_app().get('/people?filter[name:eq]=test').json['data']
self.assertEqual(len(data),1)
def test_spec_post_collection_no_attributes(self):
'''Should create a person with no attributes.'''
self.test_app().post_json(
'/people',
{
'data': {
'type': 'people',
}
},
headers={'Content-Type': 'application/vnd.api+json'}
)
def test_spec_post_must_have_type(self):
'''type must be specified.
Note: The type member is required in every resource object throughout
requests and responses in JSON API. There are some cases, such as when
POSTing to an endpoint representing heterogenous data, when the type
could not be inferred from the endpoint. However, picking and choosing
when it is required would be confusing; it would be hard to remember
when it was required and when it was not. Therefore, to improve
consistency and minimize confusion, type is always required.
'''
self.test_app().post_json(
'/people',
{
'data': {
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_spec_post_with_id(self):
'''Should create a person object with id 1000.
A server MAY accept a client-generated ID along with a request to create
a resource. An ID MUST be specified with an id key. The client SHOULD
use a properly generated and formatted UUID as described in RFC 4122
If a POST request did not include a Client-Generated ID and the
requested resource has been created successfully, the server MUST return
a 201 Created status code.
The response SHOULD include a Location header identifying the location
of the newly created resource.
The response MUST also include a document that contains the primary
resource created.
If the resource object returned by the response contains a self key in
its links member and a Location header is provided, the value of the
self member MUST match the value of the Location header.
Comment: jsonapi.allow_client_ids is set in the ini file, so we should
be able to create objects with ids. The id strategy in test_project
isn't RFC4122 UUID, but we're not enforcing that since there may be
other globally unique id strategies in use.
'''
r = self.test_app().post_json(
'/people',
{
'data': {
'id': '1000',
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=201 # Test the status code.
)
# Test for Location header.
location = r.headers.get('Location')
self.assertIsNotNone(location)
# Test that json is a resource object
data = r.json['data']
self.assertEqual(data['id'],'1000')
self.assertEqual(data['type'],'people')
self.assertEqual(data['attributes']['name'], 'test')
# Test that the Location header and the self link match.
self.assertEqual(data['links']['self'], location)
def test_spec_post_with_id_disallowed(self):
'''Should 403 when attempting to create object with id.
A server MUST return 403 Forbidden in response to an unsupported request
to create a resource with a client-generated ID.
'''
# We need a test_app with different settings.
test_app = self.test_app(
options={'pyramid_jsonapi.allow_client_ids': 'false'}
)
res = test_app.post_json(
'/people',
{
'data': {
'id': '1000',
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=403
)
def test_spec_post_with_id_conflicts(self):
'''Should 409 if id exists.
A server MUST return 409 Conflict when processing a POST request to
create a resource with a client-generated ID that already exists.
'''
self.test_app().post_json(
'/people',
{
'data': {
'id': '1',
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409 # Test the status code.
)
def test_spec_post_type_conflicts(self):
'''Should 409 if type conflicts with endpoint.
A server MUST return 409 Conflict when processing a POST request in
which the resource object’s type is not among the type(s) that
constitute the collection represented by the endpoint.
'''
self.test_app().post_json(
'/people',
{
'data': {
'id': '1000',
'type': 'frogs',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409 # Test the status code.
)
###############################################
# PATCH tests.
###############################################
def test_spec_patch(self):
'''Should change alice's name to alice2'''
# Patch alice.
self.test_app().patch_json(
'/people/1',
{
'data': {
'id': '1',
'type': 'people',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# Fetch alice back...
data = self.test_app().get('/people/1').json['data']
# ...should now be alice2.
self.assertEqual(data['attributes']['name'], 'alice2')
def test_spec_patch_invalid_json(self):
'''Invalid json should raise an error.'''
# Send garbage json
self.test_app().patch(
'/people/1',
'{,,,}',
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_spec_patch_no_type_id(self):
'''Should 409 if id or type do not exist.
The PATCH request MUST include a single resource object as primary data.
The resource object MUST contain type and id members.
'''
# No id.
self.test_app().patch_json(
'/people/1',
{
'data': {
'type': 'people',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
# No type.
self.test_app().patch_json(
'/people/1',
{
'data': {
'id': '1',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
# No type or id.
self.test_app().patch_json(
'/people/1',
{
'data': {
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
def test_spec_patch_integrity_error(self):
'''Should 409 if PATCH violates a server side constraint.
A server MAY return 409 Conflict when processing a PATCH request to
update a resource if that update would violate other server-enforced
constraints (such as a uniqueness constraint on a property other than
id).
'''
self.test_app().patch_json(
'/blogs/1',
{
'data': {
'id': '1',
'type': 'blogs',
'attributes': {
'title': 'forbidden title'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
def test_spec_patch_item_on_success(self):
'''Should return a representation of the patched object.
If a server accepts an update but also changes the resource(s) in ways
other than those specified by the request (for example, updating the
updated-at attribute or a computed sha), it MUST return a 200 OK
response. The response document MUST include a representation of the
updated resource(s) as if a GET request was made to the request URL
'''
json = self.test_app().patch_json(
'/people/1',
{
'data': {
'id': '1',
'type': 'people',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
).json
self.assertIn('meta',json)
self.assertEqual(json['data']['type'], 'people')
self.assertEqual(json['data']['id'], '1')
def notused__spec_patch_empty_success(self):
'''Should return only meta, not data or links.
A server MUST return a 200 OK status code if an update is successful,
the client’s current attributes remain up to date, and the server
responds only with top-level meta data. In this case the server MUST NOT
include a representation of the updated resource(s).
'''
json = self.test_app().patch_json(
'/people/1',
{
'data': {
'id': '1',
'type': 'people',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
).json
self.assertIn('meta',json)
self.assertEqual(len(json),1)
def test_spec_patch_nonexistent(self):
'''Should 404 when patching non existent resource.
A server MUST return 404 Not Found when processing a request to modify a
resource that does not exist.
'''
self.test_app().patch_json(
'/people/1000',
{
'data': {
'id': '1000',
'type': 'people',
'attributes': {
'name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
)
# Patching non existent attribute
detail = self.test_app().patch_json(
'/people/1',
{
'data': {
'type': 'people',
'id': '1',
'attributes': {
'non_existent': 'splat'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
).json['errors'][0]['detail']
self.assertIn('has no attribute',detail)
# Patching non existent relationship
detail = self.test_app().patch_json(
'/people/1',
{
'data': {
'type': 'people',
'id': '1',
'attributes': {
'name': 'splat'
},
'relationships': {
'non_existent': {
'data': None
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=404
).json['errors'][0]['detail']
self.assertIn('has no relationship',detail)
###############################################
# DELETE tests.
###############################################
def test_spec_delete_item(self):
'''Should delete comments/5
An individual resource can be deleted by making a DELETE request to the
resource’s URL
'''
# Check that comments/5 exists.
self.test_app().get('/comments/5')
# Delete comments/5.
self.test_app().delete('/comments/5')
# Check that comments/5 no longer exists.
self.test_app().get('/comments/5', status=404)
def test_spec_delete_no_such_item(self):
'''Should fail to delete non-existent comments/99999
A server SHOULD return a 404 Not Found status code if
a deletion request fails due to the resource not existing.
'''
# Delete comments/99999.
self.test_app().delete('/comments/99999', status=404)
def test_spec_delete_invalid_item(self):
'''Should fail to delete non-existent comments/invalid
A server SHOULD return a 404 Not Found status code if
a deletion request fails due to the resource not existing.
'''
# Delete comments/invalid
self.test_app().delete('/comments/invalid', status=404)
class TestErrors(DBTestBase):
'''Test that errors are thrown properly.'''
###############################################
# Error tests.
###############################################
def test_errors_structure(self):
'''Errors should be array of objects with code, title, detail members.'''
r = self.test_app().get(
'/people',
headers={ 'Content-Type': 'application/vnd.api+json; param=val' },
status=415,
)
self.assertIn('errors', r.json)
self.assertIsInstance(r.json['errors'], list)
err = r.json['errors'][0]
self.assertIn('code', err)
self.assertIn('title', err)
self.assertIn('detail', err)
def test_errors_only_controlled_paths(self):
'''Error handlers only for controlled paths ('api' and 'metadata')'''
app = self.test_app(
options={'pyramid_jsonapi.route_pattern_api_prefix': 'api'}
)
# Both /api/ and /metadata/ should have json structured errors
for path in ('/api/', '/metadata/'):
json = app.get(path, status=404).json
# Other paths should not have json errors
for path in ('/', '/splat/', '/api_extra/'):
r = app.get(path, status=404)
self.assertRaises(AttributeError, getattr, r, 'json')
def test_errors_composite_key(self):
'''Should raise exception if a model has a composite key.'''
self.assertRaisesRegex(
Exception,
r'^Model \S+ has more than one primary key.$',
self.test_app,
{'pyramid_jsonapi_tests.models_iterable': 'composite_key'}
)
class TestMalformed(DBTestBase):
'''Various malformed POSTs and PATCHes.'''
def test_malformed_collection_post_not_single_item(self):
'''Should complain about data being a list.'''
self.test_app().post_json(
'/people',
{'type': 'people', 'data': []},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_malformed_collection_post_no_data(self):
'''Should complain about lack of data attribute.'''
self.test_app().post_json(
'/people',
{'type': 'people'},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_malformed_item_patch_no_data(self):
'''Should complain about lack of data attribute.'''
self.test_app().patch_json(
'/people/1',
{'type': 'people', 'id': '1'},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_no_filter_operator_defaults_to_eq(self):
'''Missing filter operator should behave as 'eq'.'''
r = self.test_app().get('/people?filter[name:eq]=alice')
op = r.json['data'][0]
r = self.test_app().get('/people?filter[name]=alice')
noop = r.json['data'][0]
self.assertEqual(op, noop)
def test_malformed_filter_unregistered_operator(self):
'''Unkown filter operator should raise 400 BadRequest.'''
self.test_app().get(
'/people?filter[name:not_an_op]=splat',
status=400
)
def test_malformed_filter_bad_operator(self):
'''Known filter with no comparator should raise 500 InternalServerError.'''
self.test_app().get(
'/people?filter[name:bad_op]=splat',
status=500
)
def test_malformed_filter_unknown_column(self):
'''Unkown column should raise 400 BadRequest.'''
self.test_app().get(
'/people?filter[unknown_column:eq]=splat',
status=400
)
class TestHybrid(DBTestBase):
'''Test cases for @hybrid_property attributes.'''
def test_hybrid_readonly_get(self):
'''Blog object should have owner_name attribute.'''
atts = self.test_app().get(
'/blogs/1'
).json['data']['attributes']
self.assertIn('owner_name', atts)
self.assertEqual(atts['owner_name'], 'alice')
def test_hybrid_readonly_patch(self):
'''Updating owner_name should fail with 409.'''
self.test_app().patch_json(
'/blogs/1',
{
'data': {
'id': '1',
'type': 'blogs',
'attributes': {
'owner_name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
def test_hybrid_writeable_patch(self):
'''Should be able to update author_name of Post object.'''
# Patch post 1 and change author_name to 'alice2'
r = self.test_app().patch_json(
'/posts/1',
{
'data': {
'id': '1',
'type': 'posts',
'attributes': {
'author_name': 'alice2'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
)
# author_name should be in the list of updated attributes.
self.assertIn('author_name', r.json['meta']['updated']['attributes'])
# Fetch alice back...
data = self.test_app().get('/people/1').json['data']
# ...should now be called alice2.
self.assertEqual(data['attributes']['name'], 'alice2')
class TestHybridRelationships(DBTestBase):
'''Test cases for @hybrid_property relationships.'''
def test_hybrid_rel_to_one_get(self):
'''Post should have a relationship called blog_owner'''
data = self.test_app().get('/posts/1').json['data']
# Should have a relationship called blog_owner.
self.assertIn('blog_owner', data['relationships'])
# But not an attribute
self.assertNotIn('blog_owner', data['attributes'])
self.assertEqual(
data['relationships']['blog_owner']['data'],
{'type': 'people', 'id': '1'}
)
def test_hybrid_rel_to_many_get(self):
'''Blog should have a relationship called posts_authors'''
data = self.test_app().get('/blogs/1').json['data']
# Should have a relationship called posts_authors.
self.assertIn('posts_authors', data['relationships'])
# But not an attribute
self.assertNotIn('posts_authors', data['attributes'])
self.assertEqual(
data['relationships']['posts_authors']['data'],
[{'type': 'people', 'id': '1'}]
)
class TestJoinedTableInheritance(DBTestBase):
'''Test cases for sqlalchemy joined table inheritance pattern.'''
def test_joined_benign_create_fetch(self):
'''Should create BenignComment with author people/1 and then fetch it.'''
content = 'Main content.'
fawning_text = 'You are so great.'
created = self.test_app().post_json(
'/benign_comments',
{
'data': {
'type': 'benign_comments',
'attributes': {
'content': content,
'fawning_text': fawning_text
},
'relationships': {
'author': {
'data': {'type': 'people', 'id': '1'}
}
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=201
).json['data']
# Fetch the object back
fetched = self.test_app().get(
'/benign_comments/{}'.format(created['id'])
).json['data']
self.assertEqual(fetched['attributes']['content'], content)
self.assertEqual(
fetched['attributes']['fawning_text'],
fawning_text
)
self.assertEqual(fetched['relationships']['author']['data']['id'],'1')
class TestFeatures(DBTestBase):
'''Test case for features beyond spec.'''
def test_feature_invisible_column(self):
'''people object should not have attribute "invisible".'''
atts = self.test_app().get(
'/people/1'
).json['data']['attributes']
self.assertNotIn('invisible', atts)
self.assertNotIn('invisible_hybrid', atts)
def test_feature_invisible_relationship(self):
'''people object should not have relationship "invisible_comments".'''
rels = self.test_app().get(
'/people/1'
).json['data']['relationships']
self.assertNotIn('invisible_comments', rels)
def test_feature_rename_collection(self):
'''Should be able to fetch from whatsits even though table is things.'''
# There should be whatsits...
self.test_app().get('/whatsits')
# ...but not things.
self.test_app().get('/things', status=404)
def test_feature_construct_with_models_list(self):
'''Should construct an api from a list of models.'''
test_app = self.test_app(
options={'pyramid_jsonapi_tests.models_iterable': 'list'}
)
test_app.get('/blogs/1')
def test_feature_debug_endpoints(self):
'''Should create a set of debug endpoints for manipulating the database.'''
test_app = self.test_app(
options={
'pyramid_jsonapi.debug_endpoints': 'true',
'pyramid_jsonapi.debug_test_data_module': 'test_project.test_data'
}
)
test_app.get('/debug/populate')
def test_feature_disable_schema_validation(self):
'''Should disable schema validation.'''
# Create an app without schema validation.
test_app = self.test_app(
options = {
'pyramid_jsonapi.schema_validation': 'false'
}
)
# Schema validation produces 400 without 'type', without validation we
# get 409 (Unsupported type None)
test_app.post_json(
'/people',
{
'data': {
'not_type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=409
)
def test_feature_alternate_schema_file(self):
'''Should load alternate schema file.'''
test_app = self.test_app(
options={'pyramid_jsonapi.schema_file': '{}/test-alt-schema.json'.format(parent_dir)}
)
test_app.post_json(
'/people',
{
'data': {
'not_type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'},
status=400
)
def test_feature_debug_meta(self):
'''Should add meta information.'''
test_app = self.test_app(
options={'pyramid_jsonapi.debug_meta': 'true'}
)
self.assertIn('debug',test_app.get('/people/1').json['meta'])
def test_feature_expose_foreign_keys(self):
"""Should return blog with owner_id."""
test_app = self.test_app(
options={'pyramid_jsonapi.expose_foreign_keys': 'true'}
)
self.assertIn('owner_id', test_app.get('/blogs/1').json['data']['attributes'])
class TestBugs(DBTestBase):
def test_19_last_negative_offset(self):
'''last link should not have negative offset.
#19: 'last' link has negative offset if zero results are returned
'''
# Need an empty collection: use a filter that will not match.
last = self.test_app().get(
'/posts?pj_include_count=1&filter[title:eq]=frog'
).json['links']['last']
offset = int(
urllib.parse.parse_qs(
urllib.parse.urlparse(last).query
)['page[offset]'][0]
)
self.assertGreaterEqual(offset, 0)
def test_20_non_string_id(self):
'''Creating single object should not result in integer id.
#20: creating single object returns non string id
'''
data = self.test_app().post_json(
'/people',
{
'data': {
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'}
).json['data']
self.assertIsInstance(data['id'], str)
def test_56_post_with_non_id_primary_key(self):
'''POST to model with non 'id' primary key should work.
#56: POSTing a new item where the primary key column is not 'id' causes
an error.
'''
data = self.test_app().post_json(
'/comments',
{
'data': {
'id': '1000',
'type': 'comments',
'attributes': {
'content': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'}
).json['data']
self.assertEqual(data['id'],'1000')
def test_association_proxy(self):
'''Should treat association proxy as a relationship.'''
data = self.test_app().get('/people/1').json['data']
self.assertIn('articles_by_proxy', data['relationships'])
def test_175_head_method(self):
'''Should produce OK for HEAD request.'''
self.test_app().head('/people/1')
class TestEndpoints(DBTestBase):
"""Tests for endpoint configuration."""
def test_api_prefix(self):
"""Test setting api prefix."""
self.test_app(
options={
'pyramid_jsonapi.route_pattern_api_prefix': 'api'
}).get('/api/people')
def test_metadata_endpoints_disable(self):
self.test_app(
options={
'pyramid_jsonapi.metadata_endpoints': 'false'
}).get('/metadata/JSONSchema', status=404)
def test_api_version(self):
"""Test setting api version."""
self.test_app(
options={
'pyramid_jsonapi.api_version': '10'
}).get('/10/people')
self.test_app(
options={
'pyramid_jsonapi.api_version': '10'
}).get('/10/metadata/JSONSchema')
def test_route_pattern_prefix(self):
"""Test setting route_pattern_prefix."""
self.test_app(
options={
'pyramid_jsonapi.route_pattern_prefix': 'SPLAT'
}).get('/SPLAT/people')
self.test_app(
options={
'pyramid_jsonapi.route_pattern_prefix': 'SPLAT'
}).get('/SPLAT/metadata/JSONSchema')
def test_route_pattern_prefix_error(self):
"""Test setting route_pattern_prefix error handling."""
resp = self.test_app(
options={
'pyramid_jsonapi.route_pattern_prefix': 'SPLAT'
}).get('/SPLAT/invalid',
status=404)
self.assertTrue(resp.content_type == 'application/vnd.api+json')
def test_api_version(self):
"""Test setting api_version."""
self.test_app(
options={
'pyramid_jsonapi.api_version': 'v1',
}).get('/v1/people')
def test_api_version_error(self):
"""Test setting api_version error handling."""
resp = self.test_app(
options={
'pyramid_jsonapi.api_version': 'v1',
}).get('/v1/invalid',
status=404)
self.assertTrue(resp.content_type == 'application/vnd.api+json')
def test_route_pattern_api_prefix(self):
"""Test setting route_pattern_api_prefix."""
self.test_app(
options={
'pyramid_jsonapi.route_pattern_api_prefix': 'API'
}).get('/API/people')
def test_route_pattern_api_prefix_error(self):
"""Test setting route_pattern_prefix error handling."""
resp = self.test_app(
options={
'pyramid_jsonapi.route_pattern_api_prefix': 'API'
}).get('/API/invalid',
status=404)
self.assertTrue(resp.content_type == 'application/vnd.api+json')
def test_route_pattern_metadata_prefix(self):
"""Test setting route_pattern_metadata_prefix."""
self.test_app(
options={
'pyramid_jsonapi.route_pattern_metadata_prefix': 'METADATA'
}).get('/METADATA/JSONSchema')
def test_route_pattern_metadata_prefix_error(self):
"""Test setting route_pattern_prefix error handling."""
resp = self.test_app(
options={
'pyramid_jsonapi.route_pattern_metadata_prefix': 'METADATA'
}).get('/METADATA/invalid',
status=404)
self.assertTrue(resp.content_type == 'application/vnd.api+json')
def test_route_pattern_all_prefixes(self):
"""Test setting all pattern prefixes."""
api = self.test_app(
options={
'pyramid_jsonapi.route_pattern_prefix': 'SPLAT',
'pyramid_jsonapi.api_version': 'v1',
'pyramid_jsonapi.route_pattern_api_prefix': 'API',
'pyramid_jsonapi.route_pattern_metadata_prefix': 'METADATA'
})
api.get('/SPLAT/v1/API/people')
api.get('/SPLAT/v1/METADATA/JSONSchema')
def test_route_pattern_all_prefixes_error(self):
"""Test setting all pattern prefixes error handling."""
api = self.test_app(
options={
'pyramid_jsonapi.route_pattern_prefix': 'SPLAT',
'pyramid_jsonapi.api_version': 'v1',
'pyramid_jsonapi.route_pattern_api_prefix': 'API',
'pyramid_jsonapi.route_pattern_metadata_prefix': 'METADATA'
})
self.assertEqual(
api.get('/SPLAT/v1/API/invalid', status=404).content_type,
'application/vnd.api+json'
)
self.assertEqual(
api.get('/SPLAT/v1/METADATA/invalid', status=404).content_type,
'application/vnd.api+json'
)
class TestMetaData(DBTestBase):
"""Tests for the metadata plugins."""
@classmethod
def setUpClass(cls):
"""Setup metadata plugins."""
super().setUpClass()
config = Configurator()
cls.api = pyramid_jsonapi.PyramidJSONAPI(config, [])
cls.api.create_jsonapi()
cls.metadata = pyramid_jsonapi.metadata.MetaData(cls.api)
def test_no_jsonschema_module(self):
"""Test how things break if jsonschema is disabled."""
self.test_app(
options={
'pyramid_jsonapi.metadata_modules': ''
}).post('/people', '{}', status=500)
self.test_app(
options={
'pyramid_jsonapi.metadata_modules': ''
}).get('/metadata/JSONSchema', '{}', status=404)
def test_disable_jsonschema_validation(self):
"""Test disabling jsonschema and validation together works."""
self.test_app(
options={
'pyramid_jsonapi.metadata_modules': '',
'pyramid_jsonapi.schema_validation': 'false',
}).post_json(
'/people',
{
'data': {
'type': 'people',
'attributes': {
'name': 'test'
}
}
},
headers={'Content-Type': 'application/vnd.api+json'}
)
def test_jsonschema_template(self):
"""Test that template() returns valid json, and as a view."""
dir_tmpl = json.dumps(self.metadata.JSONSchema.template())
view_tmpl = self.test_app().get('/metadata/JSONSchema', '{}').json
def test_jsonschema_load_schema_file(self):
"""Test loading jsonschema from file."""
path = "/tmp/nosuchfile.json"
schema = {"test": "true"}
self.api.settings.schema_file = path
with patch("builtins.open", mock_open(read_data=json.dumps(schema))) as mock_file:
self.metadata.JSONSchema.load_schema()
mock_file.assert_called_with(path)
self.assertDictEqual(schema, self.metadata.JSONSchema.schema)
def test_jsonschema_resource_attributes_view(self):
"""Test that resource_attributes view returns valid json."""
self.test_app().get('/metadata/JSONSchema/resource/people', status=200).json
def test_jsonschema_resource_attributes_view_not_found(self):
"""Test that view returns 404 for non-existent endpoint."""
self.test_app().get('/metadata/JSONSchema/resource/invalid', status=404)
def test_jsonschema_endpoint_schema_view(self):
"""Check that endpoint_schema returns json with appropriate query params."""
self.test_app().get('/metadata/JSONSchema/endpoint/people',
params='method=get&direction=request&code=200',
status=200).json
self.test_app().get('/metadata/JSONSchema/endpoint/people',
params='method=get&direction=response&code=200',
status=200).json
def test_jsonschema_endpoint_schema_view_failure_schema(self):
"""Test that a reference to the failure schema is returned for code=4xx."""
res = self.test_app().get('/metadata/JSONSchema/endpoint/people',
params='method=get&direction=response&code=404',
status=200).json
self.assertEqual(res, {"$ref" : "#/definitions/failure"})
def test_jsonschema_endpoint_schema_view_bad_params(self):
"""Test that 400 returned if missing/bad query params specified."""
self.test_app().get('/metadata/JSONSchema/endpoint/people', status=400).json
self.test_app().get('/metadata/JSONSchema/endpoint/people', params='cat=1', status=400).json
def test_jsonschema_endpoint_schema_view_not_found(self):
self.test_app().get('/metadata/JSONSchema/endpoint/invalid',
params='method=get&direction=request&code=200',
status=404).json
def test_jsonschema_invalid_schema(self):
"""Invalid schema mappings generate empty resource attrs."""
# posts has JSONB field
res = self.test_app().get('/metadata/JSONSchema/resource/posts').json
self.assertEqual(res, {})
def test_openapi_swagger_ui_view(self):
"""Test that swagger_ui view returns html."""
html = self.test_app().get('/metadata/OpenAPI', status=200).html
def test_openapi_specification_view(self):
"""Test that specification view returns valid json."""
self.test_app().get('/metadata/OpenAPI/specification', status=200).json
# def test_openapi_specification_valid(self):
# """Test that the openapi specification returned is valid."""
# validate_spec(self.test_app().get('/metadata/OpenAPI/specification', status=200).json)
# print(json.dumps(self.test_app().get('/metadata/OpenAPI/specification', status=200).json, indent=4))
def test_openapi_file(self):
"""Test providing openapi spec updates in a file."""
path = os.path.dirname(os.path.realpath(__file__))
res = self.test_app(
options={
'pyramid_jsonapi.openapi_file': os.path.join(path, 'test-openapi.json'),
}).get('/metadata/OpenAPI/specification', status=200).json
# Check that openapi file merge has overridden version string
self.assertEqual("999", res['openapi'])
if __name__ == "__main__":
unittest.main()<|fim▁end|> | }
}
}, |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from os import environ
# if you set a property in SESSION_CONFIG_DEFAULTS, it will be inherited by all configs
# in SESSION_CONFIGS, except those that explicitly override it.
# the session config can be accessed from methods in your apps as self.session.config,
# e.g. self.session.config['participation_fee']
SESSION_CONFIG_DEFAULTS = {
'real_world_currency_per_point': 1.00,
'participation_fee': 0.00,
'doc': "",
}
SESSION_CONFIGS = [
{
'name': 'dill_resp_punish_first',
'display_name': "Dilution de responsabilité, Punish First",
'num_demo_participants': 12,
'app_sequence': ['dill_resp'],
'treatment_order': 'punish_first'
},
{
'name': 'dill_resp_punish_last',
'display_name': "Dilution de responsabilité, Punish Last",
'num_demo_participants': 12,
'app_sequence': ['dill_resp'],
'treatment_order': 'punish_last'
},
]
# ISO-639 code
# for example: de, fr, ja, ko, zh-hans
LANGUAGE_CODE = 'en'
# e.g. EUR, GBP, CNY, JPY
REAL_WORLD_CURRENCY_CODE = 'USD'<|fim▁hole|>USE_POINTS = True
ROOMS = []
CHANNEL_ROUTING = 'redirect.routing.channel_routing'
# AUTH_LEVEL:
# this setting controls which parts of your site are freely accessible,
# and which are password protected:
# - If it's not set (the default), then the whole site is freely accessible.
# - If you are launching a study and want visitors to only be able to
# play your app if you provided them with a start link, set it to STUDY.
# - If you would like to put your site online in public demo mode where
# anybody can play a demo version of your game, but not access the rest
# of the admin interface, set it to DEMO.
# for flexibility, you can set it in the environment variable OTREE_AUTH_LEVEL
AUTH_LEVEL = environ.get('OTREE_AUTH_LEVEL')
ADMIN_USERNAME = 'admin'
# for security, best to set admin password in an environment variable
ADMIN_PASSWORD = environ.get('OTREE_ADMIN_PASSWORD')
# Consider '', None, and '0' to be empty/false
DEBUG = (environ.get('OTREE_PRODUCTION') in {None, '', '0'})
DEMO_PAGE_INTRO_HTML = """ """
# don't share this with anybody.
SECRET_KEY = '29*rluv^s95qdbcfe6&mql^2$-_^e7nvtxi_j7r%wl#8g27p(q'
# if an app is included in SESSION_CONFIGS, you don't need to list it here
INSTALLED_APPS = ['otree']<|fim▁end|> | |
<|file_name|>xmlrubyhandler.cpp<|end_file_name|><|fim▁begin|>/******************************************************************************\
* Utopia Player - A cross-platform, multilingual, tagging media manager *
* Copyright (C) 2006-2007 John Eric Martin <[email protected]> *
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License version 2 as *
* published by the Free Software Foundation. *
* *<|fim▁hole|>* GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License *
* along with this program; if not, write to the *
* Free Software Foundation, Inc., *
* 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
\******************************************************************************/
#include "xmlrubyhandler.h"
XmlRubyHandler::XmlRubyHandler()
{
mText.clear();
mRuby.clear();
mColor.clear();
currentText = QString("");
currentRuby = QString("");
currentColor.clear();
currentElement.clear();
};
bool XmlRubyHandler::startDocument()
{
currentElement.clear();
currentElement.push(QString::fromUtf8("BASE"));
mText.clear();
mRuby.clear();
mColor.clear();
currentText = QString("");
currentRuby = QString("");
currentColor.clear();
currentColor.push( QColor("#000000") );
return true;
};
bool XmlRubyHandler::startElement( const QString & namespaceURI, const QString & localName, const QString & qName, const QXmlAttributes & atts )
{
currentElement.push(qName);
if( ( ( qName == "font" && !atts.value("color").isEmpty() ) || qName == "ruby") && !currentText.isEmpty() )
{
mText << currentText;
mRuby << currentRuby;
mColor << currentColor.top();
currentText = QString("");
currentRuby = QString("");
}
if(!atts.value("color").isEmpty())
currentColor.push( QColor( atts.value("color") ) );
return true;
};
bool XmlRubyHandler::characters( const QString & ch )
{
if( currentElement.top() == QString::fromUtf8("rp") || currentElement.top() == QString::fromUtf8("ruby") )
return true;
if( currentElement.top() == QString::fromUtf8("rt") )
{
currentRuby += ch;
return true;
}
currentText += ch;
return true;
};
bool XmlRubyHandler::endElement( const QString & namespaceURI, const QString & localName, const QString & qName )
{
currentElement.pop();
if( ( qName == QString::fromUtf8("ruby") || qName == QString::fromUtf8("font") ) && !currentText.isEmpty() )
{
mText << currentText;
mRuby << currentRuby;
mColor << currentColor.top();
currentText = QString("");
currentRuby = QString("");
}
if( qName == QString::fromUtf8("font") )
currentColor.pop();
return true;
};
bool XmlRubyHandler::endDocument()
{
if( !currentText.isEmpty() )
{
mText << currentText;
mRuby << currentRuby;
mColor << currentColor.top();
}
return true;
};<|fim▁end|> | * This program is distributed in the hope that it will be useful, *
* but WITHOUT ANY WARRANTY; without even the implied warranty of *
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * |
<|file_name|>router.js<|end_file_name|><|fim▁begin|><%= grunt.util._.camelize(appname) %>.Routers.<%= _.classify(name) %>Router = Backbone.Router.extend({
routes: {
"login" : "login"<|fim▁hole|> var self = this;
},
login: function(){
var self = this;
}
});<|fim▁end|> | },
initialize : function(){ |
<|file_name|>rawledger.go<|end_file_name|><|fim▁begin|><|fim▁hole|>Copyright IBM Corp. 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package rawledger
import (
cb "github.com/hyperledger/fabric/protos/common"
ab "github.com/hyperledger/fabric/protos/orderer"
)
// Iterator is useful for a chain Reader to stream blocks as they are created
type Iterator interface {
// Next blocks until there is a new block available, or returns an error if the next block is no longer retrievable
Next() (*cb.Block, cb.Status)
// ReadyChan supplies a channel which will block until Next will not block
ReadyChan() <-chan struct{}
}
// Reader allows the caller to inspect the raw ledger
type Reader interface {
// Iterator retrieves an Iterator, as specified by an cb.SeekInfo message, returning an iterator, and it's starting block number
Iterator(startType ab.SeekInfo_StartType, specified uint64) (Iterator, uint64)
// Height returns the highest block number in the chain, plus one
Height() uint64
}
// Writer allows the caller to modify the raw ledger
type Writer interface {
// Append a new block to the ledger
Append(blockContents []*cb.Envelope, proof []byte) *cb.Block
}
// ReadWriter encapsulated both the reading and writing functions of the rawledger
type ReadWriter interface {
Reader
Writer
}<|fim▁end|> | /* |
<|file_name|>fb2desc.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
# (c) Con Radchenko mailto:[email protected]
#
# $Id: fb2desc.py,v 1.10 2008/09/15 04:18:45 con Exp con $
#
import sys, os
import locale
import getopt
import codecs
import zipfile
from cStringIO import StringIO
import xml.sax
import shutil
import traceback
def get_filename(authors_list, sequence_name, sequence_number, title):
'''Форматы:
1 - "полные имена авторов, разделенные запятой - название (серия #номер)"
2 - тоже, но преобразованное в транслит и с заменой пробелов
3 - "фамилии авторов, разделенные запятой - название"
4 - тоже, но преобразованное в транслит и с заменой пробелов
5 - "первая буква автора в нижнем регистре/авторы, разделенные запятой, в нижнем регистре/авторы, разделенные запятой - название (серия #номер)"
6 - тоже, но преобразованное в транслит и с заменой пробелов
'''
format = options['fn-format']
out = []
authors = []
full_authors = []
for a in authors_list:
if a[0]:
authors.append(a[0])
fa = ' '.join(i for i in a if i)
if fa:
full_authors.append(fa)
authors = ', '.join(authors)
if not authors:
authors = 'unknown'
full_authors = ', '.join(full_authors)
if not full_authors:
full_authors = 'unknown'
if not title:
title = 'unknown'
seq = ''
if sequence_name:
if sequence_number:
seq = '(%s #%s)' % (sequence_name, sequence_number)
else:
seq = '(%s)' % sequence_name
if format == 3:
out.append(authors)
out.append('-')
out.append(title)
out = ' '.join(out)
else:
out.append(full_authors)
out.append('-')
out.append(title)
if seq:
out.append(seq)
out = ' '.join(out)
if format in (2, 4, 6):
out = translit(out)
full_authors = translit(full_authors)
#out = out.replace('/', '%').replace('\0', '').replace('?', '')
for c in '|\\?*<":>+[]/': # invalid chars in VFAT
out = out.replace(c, '')
if format in (4, 5):
full_authors = full_authors.replace(c, '')
fn_max = 240
if format in (5, 6):
fl = full_authors[0]
if not fl.isalpha():
fl = full_authors[1] # FIXME
out = os.path.join(
fl.lower().encode(options['charset']),
full_authors.lower().encode(options['charset'])[:fn_max],
out.encode(options['charset'])[:fn_max])
else:
out = out.encode(options['charset'])[:fn_max]
return out
##----------------------------------------------------------------------
options = {
'format' : '',
'charset' : 'utf-8',
'zip-charset' : 'cp866',
'elements' : [],
'replace' : False,
'rename' : False,
'slink' : False,
'copy' : False,
'fn-format' : 2,
'show-cover' : False,
'show-content' : False,
'show-tree' : False,
'image-viewer' : 'xv',
'quiet' : False,
'dest-dir' : None,
#
'suffix' : None,
}
##----------------------------------------------------------------------
class StopParsing(Exception):
pass
##----------------------------------------------------------------------
# u'\u2013' -> '--'
# u'\u2014' -> '---'
# u'\xa0' -> неразрывный пробел
# u'\u2026' -> dots...
# u'\xab' -> '<<'
# u'\xbb' -> '>>'
# u'\u201c' -> ``
# u'\u201d' -> ''
# u'\u201e' -> ,,
def replace_chars(s):
return (s
.replace(u'\u2013', u'--')
.replace(u'\u2014', u'---')
.replace(u'\xa0' , u' ')
.replace(u'\u2026', u'...')
.replace(u'\xab' , u'<<')
.replace(u'\xbb' , u'>>')
.replace(u'\u201c', u'``')
.replace(u'\u201d', u'\'\'')
.replace(u'\u201e', u',,')
)
def translit(s):
trans_tbl = {
u'\u0430': 'a', #а
u'\u0431': 'b', #б
u'\u0432': 'v', #в
u'\u0433': 'g', #г
u'\u0434': 'd', #д
u'\u0435': 'e', #е
u'\u0451': 'yo', #ё
u'\u0436': 'zh', #ж
u'\u0437': 'z', #з
u'\u0438': 'i', #и
u'\u0439': 'y', #й
u'\u043a': 'k', #к
u'\u043b': 'l', #л
u'\u043c': 'm', #м
u'\u043d': 'n', #н
u'\u043e': 'o', #о
u'\u043f': 'p', #п
u'\u0440': 'r', #р
u'\u0441': 's', #с
u'\u0442': 't', #т
u'\u0443': 'u', #у
u'\u0444': 'f', #ф
u'\u0445': 'h', #х
u'\u0446': 'c', #ц
u'\u0447': 'ch', #ч
u'\u0448': 'sh', #ш
u'\u0449': 'sh', #щ
u'\u044a': '', #ъ
u'\u044b': 'y', #ы
u'\u044c': '', #ь
u'\u044d': 'e', #э
u'\u044e': 'ju', #ю
u'\u044f': 'ya', #я
}
alnum = 'abcdefghijklmnopqrstuvwxyz0123456789'
out = []
out_s = ''
for i in s.lower():
if i.isalnum():
if i in trans_tbl:
out_s += trans_tbl[i]
elif i in alnum:
out_s += i
else:
if out_s: out.append(out_s)
out_s = ''
if out_s: out.append(out_s)
return '_'.join(out)
def wrap_line(s):
if len(s) <= 70:
return u' '+s
ss = u' '
sl = []
for word in s.split():
if len(ss+word) > 72:
sl.append(ss)
ss = word
elif ss:
ss += u' ' + word
else:
ss = word
sl.append(ss)
return '\n'.join(sl)
##----------------------------------------------------------------------
def show_cover(filename, data, content_type):
if not data:
print >> sys.stderr, '%s: sorry, cover not found' % filename
return
import base64, tempfile
data = base64.decodestring(data)
if content_type and content_type.startswith('image/'):
suffix = '.'+content_type[6:]
else:
suffix = ''
tmp_id, tmp_file = tempfile.mkstemp(suffix)
try:
open(tmp_file, 'w').write(data)
os.system(options['image-viewer']+' '+tmp_file)
finally:
os.close(tmp_id)
os.remove(tmp_file)
def show_content(filename, titles):
for secttion_level, data in titles:
if options['replace']: data = replace_chars(data)
print ' '*secttion_level+data.encode(options['charset'], 'replace')
print
def rename(filename, zipfilename, desc, data):
to = pretty_format(filename, zipfilename, len(data), desc, 'filename')
##filename = os.path.abspath(filename)
to += options['suffix']
if options['dest-dir']:
to = os.path.join(options['dest-dir'], to)
to = os.path.abspath(to)
if os.path.exists(to):
print >> sys.stderr, 'file %s already exists' % to
return
dir_name = os.path.dirname(to)
if not os.path.exists(dir_name):
os.makedirs(dir_name)
if options['slink']:
os.symlink(filename, to)
return
elif options['copy']:
shutil.copy(filename, to)
return
os.rename(filename, to)
def pretty_format(filename, zipfilename, filesize, desc, format='pretty'):
ann = []
title = ''
authors_list = []
# [last-name, first-name, middle-name, nick-name]
author_name = [None, None, None, None]
genres = []
sequence_name = ''
sequence_number = ''
for elem, data in desc:
## data = data.strip()
## if not data:
## continue
if elem.startswith('/description/title-info/annotation/'):
if not elem.endswith('href'):
ann.append(data) #wrap_line(data))
if elem.endswith('/p'):
ann.append('\n')
elif elem == '/description/title-info/book-title':<|fim▁hole|> title = data
elif elem == '/description/title-info/author/first-name':
author_name[1] = data
elif elem == '/description/title-info/author/middle-name':
author_name[2] = data
elif elem == '/description/title-info/author/last-name':
author_name[0] = data
authors_list.append(author_name)
author_name = [None, None, None, None]
elif elem == '/description/title-info/author/nick-name':
#author_name[3] = data
if not author_name[0]:
author_name[0] = data
else:
author_name[3] = data
authors_list.append(author_name)
author_name = [None, None, None, None]
elif elem == '/description/title-info/genre':
genres.append(data)
elif elem == '/description/title-info/sequence/name':
sequence_name = data
elif elem == '/description/title-info/sequence/number':
sequence_number = data
##authors_list.sort()
authors = u', '.join(' '.join(n for n in a if n) for a in authors_list if a)
annotation = []
ann = ''.join(ann).split('\n')
for s in ann:
annotation.append(wrap_line(s))
annotation = '\n'.join(annotation)
if format == 'single':
if sequence_name and sequence_number:
out = u'%s - %s (%s %s)' % (authors, title,
sequence_name, sequence_number)
elif sequence_name:
out = u'%s - %s (%s)' % (authors, title, sequence_name)
else:
out = u'%s - %s' % (authors, title)
#out = '%s: %s' % (filename, out)
if options['replace']: out = replace_chars(out)
return out.encode(options['charset'], 'replace')
elif format == 'pretty':
out = u'''\
File : %s
''' % filename
if zipfilename:
out += u'''\
Zip Filename : %s
''' % zipfilename
out += u'''\
Size : %d kb
''' % int(filesize/1024)
out += u'''\
Author(s) : %s
Title : %s
Genres : %s
''' % (authors, title, u', '.join(genres))
if sequence_name:
if sequence_number:
sequence = u'%s (%s)' % (sequence_name, sequence_number)
else:
sequence = sequence_name
out += u'''\
Sequence : %s
''' % sequence
if annotation:
out += u'''\
Annotation :
%s
''' % annotation
if options['replace']: out = replace_chars(out)
return out.encode(options['charset'], 'replace')
elif format == 'filename':
return get_filename(authors_list, sequence_name, sequence_number, title)
def raw_format(filename, zipfilename, desc):
if options['quiet']:
out = u''
else:
out = u'filename: %s\n' % filename
if zipfilename:
out += u'zipfilename: %s\n' % zipfilename
for elem, data in desc:
if not data:
continue
t = filter(elem.startswith, options['elements'])
#t = [x for x in options['elements'] if elem.startswith(x)]
if options['elements'] == [] or t:
out += u'%s: %s\n' % (elem, data)
if options['replace']: out = replace_chars(out)
return out.encode(options['charset'], 'replace')
##----------------------------------------------------------------------
class ContentHandler(xml.sax.handler.ContentHandler):
def __init__(self):
self.elem_stack = []
self.is_desc = False
self.is_cover = False
self.cur_data = ''
self.desc = []
self.cover = ''
self.cover_name = ''
self.cover_content_type = ''
self.is_title = False
self.cur_title = []
self.titles = []
self.section_level = 0
self.tree = []
def startElement(self, name, attrs):
if name == 'description': self.is_desc = True
if name == 'section': self.section_level += 1
if self.is_desc or options['show-tree']:
self.elem_stack.append(name)
elem = '/'+'/'.join(self.elem_stack)
if options['show-tree']:
if self.tree and self.tree[-1][0] == elem:
#print self.tree[-1]
self.tree[-1][1] += 1
else:
#if not elem.endswith('/p') and not elem.endswith('/v'):
self.tree.append([elem, 1])
for atr in attrs.getNames():
#t = (elem+u'/'+atr, attrs.getValue(atr))
self.desc.append((elem+u'/'+atr, attrs.getValue(atr)))
if elem == '/description/title-info/coverpage/image' and \
atr.endswith('href'):
self.cover_name = attrs.getValue(atr)[1:]
self.is_cover = False
if options['show-cover'] and name == 'binary':
content_type = ''
for atr in attrs.getNames():
if atr == 'id' and attrs.getValue(atr) == self.cover_name:
self.is_cover = True
elif atr == 'content-type':
content_type = attrs.getValue(atr)
if self.is_cover and content_type:
self.cover_content_type = content_type
if options['show-content'] and name == 'title':
self.is_title = True
self.cur_title = []
def endElement(self, name):
if self.is_desc and self.cur_data:
elem_name = '/'+'/'.join(self.elem_stack)
self.desc.append((elem_name, self.cur_data.strip()))
self.cur_data = ''
if self.is_desc or options['show-tree']:
del self.elem_stack[-1]
if name == 'description':
if not options['show-cover'] \
and not options['show-content'] \
and not options['show-tree']:
raise StopParsing
else:
self.is_desc = False
if options['show-content'] and name == 'title':
self.is_title = False
self.titles.append((self.section_level, ' '.join(self.cur_title)))
self.cur_data = ''
if name == 'section': self.section_level -= 1
def characters(self, data):
if self.is_desc:
#data = data.strip()
data = data.replace('\n', ' ')
if self.cur_data:
self.cur_data += data
else:
self.cur_data = data
if options['show-cover'] and self.is_cover:
self.cover += data
if options['show-content'] and self.is_title:
data = data.strip()
if data: self.cur_title.append(data)
class ErrorHandler(xml.sax.handler.ErrorHandler): pass
class EntityResolver(xml.sax.handler.EntityResolver): pass
class DTDHandler(xml.sax.handler.DTDHandler): pass
##----------------------------------------------------------------------
def fb2parse(filename, zipfilename, data):
if not data.startswith('<?xml') and not data.startswith('\xef\xbb\xbf<?xml'):
print >> sys.stderr, \
'Warning: file %s is not an XML file. Skipped.' % filename
print repr(data[:5])
#shutil.copy(filename, '/home/con/t/')
return
chandler = ContentHandler()
input_source = xml.sax.InputSource()
input_source.setByteStream(StringIO(data))
xml_reader = xml.sax.make_parser()
xml_reader.setContentHandler(chandler)
xml_reader.setErrorHandler(ErrorHandler())
xml_reader.setEntityResolver(EntityResolver())
xml_reader.setDTDHandler(DTDHandler())
try:
xml_reader.parse(input_source)
except StopParsing:
pass
if options['rename']:
rename(filename, zipfilename, chandler.desc, data)
return
if options['show-tree']:
for e, n in chandler.tree:
if n > 1:
print '%s [%d]' % (e, n)
else:
print e
return
if options['format'] == 'pretty':
print pretty_format(filename, zipfilename, len(data), chandler.desc, 'pretty')
elif options['format'] == 'filename':
print pretty_format(filename, zipfilename, len(data), chandler.desc, 'filename')
elif options['format'] == 'single':
print pretty_format(filename, zipfilename, len(data), chandler.desc, 'single')
elif options['format'] == '' \
and not options['show-cover'] \
and not options['show-content']:
print raw_format(filename, zipfilename, chandler.desc)
if options['show-cover'] or options['show-content']:
if options['format'] == 'raw':
print raw_format(filename, zipfilename, chandler.desc)
if options['show-content']:
show_content(filename, chandler.titles)
if options['show-cover']:
show_cover(filename, chandler.cover, chandler.cover_content_type)
##----------------------------------------------------------------------
def main():
#locale.setlocale(locale.LC_ALL, '')
default_charset = locale.getdefaultlocale()[1]
if default_charset:
options['charset'] = default_charset
prog_name = os.path.basename(sys.argv[0])
try:
optlist, args = getopt.getopt(sys.argv[1:], 'c:Ce:f:hlopqrRStvwz:',
['raw', 'pretty',
'single',
'output=',
'rename', 'copy', 'slink',
'fn-format=',
'cover', 'contents', 'tree',
'charset=', 'zip-charset=',
'elements=',
'dest-dir=',
'image-viewer=',
'replace', 'quiet', 'help'])
except getopt.GetoptError, err:
sys.exit('%s: %s\ntry %s --help for more information'
% (prog_name, err, prog_name))
help_msg = '''fb2desc -- show description of FB2 file(s)
Usage: %s [options] files|dir
-w --raw-format output in raw format (default)
-p --pretty output in pretty format
-l --single output in single format
--output format output in format (raw, pretty, single, filename)
-o --contents show contents
-t --tree
-v --cover show cover
-c --charset <charset> specify output charset (default: %s)
-z --zip-charset <charset>
-r --replace replace any chars
-e --elements <elements> show only this elements (comma separeted)
-R --rename rename mode
-S --slink create softlinks
-C --copy copy files
--fn-format <format> rename pattern (1, 2, 3, 4, 5, 6)
--dest-dir
--image-viewer
-q --quiet suppress output filename
-h --help display this help''' \
% (prog_name, default_charset)
for i in optlist:
if i[0] == '--help' or i[0] == '-h':
print help_msg
sys.exit()
elif i[0] in ('--charset', '-c'):
charset = i[1]
try:
codecs.lookup(charset)
except LookupError, err:
sys.exit('%s: %s' % (prog_name, err))
options['charset'] = charset
elif i[0] in ('-z', '--zip-charset'):
charset = i[1]
try:
codecs.lookup(charset)
except LookupError, err:
sys.exit('%s: %s' % (prog_name, err))
options['zip-charset'] = charset
elif i[0] == '--elements' or i[0] == '-e':
options['elements'] = i[1].split(',')
elif i[0] == '--output':
f = i[1]
if f not in ('raw', 'pretty', 'single', 'filename'):
sys.exit('''bad option for --output
must be raw, pretty, single, filename
''')
options['format'] = f
elif i[0] == '--raw' or i[0] == '-w':
options['format'] = 'raw'
elif i[0] == '--single' or i[0] == '-l':
options['format'] = 'single'
elif i[0] == '--pretty-format' or i[0] == '-p':
options['format'] = 'pretty'
elif i[0] == '--replace' or i[0] == '-r':
options['replace'] = True
elif i[0] == '--rename' or i[0] == '-R':
options['rename'] = True
elif i[0] == '--slink' or i[0] == '-S':
options['rename'] = True
options['slink'] = True
elif i[0] == '--copy' or i[0] == '-C':
options['rename'] = True
options['copy'] = True
elif i[0] in ('--fn-format', '-f'):
f = i[1]
if f not in ('1', '2', '3', '4', '5', '6'):
sys.exit('''bad option for --fn-format
must be 1, 2, 3, 4, 5, 6
''')
options['fn-format'] = int(f)
elif i[0] == '--contents' or i[0] == '-o':
options['show-content'] = True
elif i[0] == '--cover' or i[0] == '-v':
options['show-cover'] = True
elif i[0] == '--tree' or i[0] == '-t':
options['show-tree'] = True
elif i[0] == '--quiet' or i[0] == '-q':
options['quiet'] = True
elif i[0] == '--dest-dir':
options['dest-dir'] = i[1]
elif i[0] == '--image-viewer':
options['image-viewer'] = i[1]
if len(args) == 0:
sys.exit('%s: missing filename\ntry %s --help for more information'
% (prog_name, prog_name))
in_files = []
for fn in args:
if os.path.isdir(fn):
for root, dirs, files in os.walk(fn):
for f in files:
in_files.append(os.path.join(root, f))
else:
in_files.append(fn)
#print in_files
#return
for raw_filename in in_files:
try:
filename = os.path.abspath(raw_filename)
filename = unicode(filename, options['charset'])
except UnicodeDecodeError, err:
#raise
#print >> sys.stderr, 'WARNING: decode filename:', str(err)
#continue
filename = '' # fixme
pass
if zipfile.is_zipfile(raw_filename):
options['suffix'] = '.fb2.zip'
zf = zipfile.ZipFile(raw_filename)
for zip_filename in zf.namelist():
data = zf.read(zip_filename)
try:
##zip_filename = unicode(zip_filename, options['charset'])
zip_filename = unicode(zip_filename, options['zip-charset'])
except UnicodeDecodeError, err:
print >> sys.stderr, 'WARNING: decode zip filename:', str(err)
zip_filename = ''
try:
fb2parse(filename, zip_filename, data)
except:
traceback.print_exc()
##shutil.copy(raw_filename, '/home/con/t/')
else:
if options['rename']:
continue
else:
options['suffix'] = '.fb2'
data = None
try:
data = open(raw_filename).read()
except IOError as e:
data = open(filename).read()
if data.startswith('BZh'):
import bz2
options['suffix'] = '.fb2.bz2'
data = bz2.decompress(data)
elif data.startswith('\x1f\x8b'):
import gzip
options['suffix'] = '.fb2.gz'
data = gzip.GzipFile(fileobj=StringIO(data)).read()
try:
fb2parse(filename, '', data)
except:
traceback.print_exc()
if __name__ == '__main__':
main()<|fim▁end|> | |
<|file_name|>CMD.py<|end_file_name|><|fim▁begin|>from subprocess import PIPE, Popen
from sqlalchemy import create_engine
def run(p):
try:
p["log"].info(p["action"]['query'])<|fim▁hole|> message = ''
for r in result:
if r: message += r + '\n'
p["log"].success(message)
except Exception, e:
AllGood = False
p["log"].error("command line execution failed",e)
return True<|fim▁end|> | proc = Popen(p["action"]['query'], shell=True,
stdin=PIPE, stdout=PIPE, stderr=PIPE)
result = proc.communicate() |
<|file_name|>movimento.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.urls import reverse_lazy
from django.shortcuts import redirect
from itertools import chain
from datetime import datetime
from decimal import Decimal
from djangosige.apps.base.custom_views import CustomDetailView, CustomCreateView, CustomListView
from djangosige.apps.estoque.forms import EntradaEstoqueForm, SaidaEstoqueForm, TransferenciaEstoqueForm, ItensMovimentoFormSet
from djangosige.apps.estoque.models import MovimentoEstoque, EntradaEstoque, SaidaEstoque, TransferenciaEstoque, ProdutoEstocado
class MovimentoEstoqueMixin(object):
def adicionar_novo_movimento_estoque(self, itens_mvmt_obj, pform, lista_produtos, lista_produtos_estocados):
prod = itens_mvmt_obj.produto
lista_produtos.append(prod)
# Modificar valor do estoque atual dos produtos
if prod.estoque_atual is not None and isinstance(self.object, EntradaEstoque):
prod_estocado = ProdutoEstocado.objects.get_or_create(
local=self.object.local_dest, produto=itens_mvmt_obj.produto)[0]
prod_estocado.quantidade = prod_estocado.quantidade + itens_mvmt_obj.quantidade
lista_produtos_estocados.append(prod_estocado)
prod.estoque_atual = prod.estoque_atual + itens_mvmt_obj.quantidade
elif prod.estoque_atual is not None and isinstance(self.object, SaidaEstoque):
prod_estocado = ProdutoEstocado.objects.get_or_create(
local=self.object.local_orig, produto=itens_mvmt_obj.produto)[0]
if itens_mvmt_obj.quantidade > prod_estocado.quantidade:
itens_mvmt_obj.quantidade = prod_estocado.quantidade
prod_estocado.quantidade = Decimal('0.00')
else:
prod_estocado.quantidade = prod_estocado.quantidade - itens_mvmt_obj.quantidade
lista_produtos_estocados.append(prod_estocado)
if prod.estoque_atual < itens_mvmt_obj.quantidade:
pform.add_error('quantidade', 'Quantidade retirada do estoque maior que o estoque atual (' +
str(prod.estoque_atual).replace('.', ',') + ') do produto.')
else:
prod.estoque_atual = prod.estoque_atual - itens_mvmt_obj.quantidade
elif isinstance(self.object, TransferenciaEstoque):
prod_estocado_orig = ProdutoEstocado.objects.get_or_create(
local=self.object.local_estoque_orig, produto=itens_mvmt_obj.produto)[0]
prod_estocado_dest = ProdutoEstocado.objects.get_or_create(
local=self.object.local_estoque_dest, produto=itens_mvmt_obj.produto)[0]
if itens_mvmt_obj.quantidade > prod_estocado_orig.quantidade:
itens_mvmt_obj.quantidade = prod_estocado_orig.quantidade
prod_estocado_orig.quantidade = Decimal('0.00')
else:
prod_estocado_orig.quantidade = prod_estocado_orig.quantidade - \
itens_mvmt_obj.quantidade
prod_estocado_dest.quantidade = prod_estocado_dest.quantidade + \
itens_mvmt_obj.quantidade
lista_produtos_estocados.append(prod_estocado_orig)
lista_produtos_estocados.append(prod_estocado_dest)
class AdicionarMovimentoEstoqueBaseView(CustomCreateView, MovimentoEstoqueMixin):
permission_codename = 'add_movimentoestoque'
def get_success_message(self, cleaned_data):
return self.success_message % dict(cleaned_data, pk=self.object.pk)
def get_context_data(self, **kwargs):
context = super(AdicionarMovimentoEstoqueBaseView,
self).get_context_data(**kwargs)
return self.view_context(context)
def get(self, request, *args, **kwargs):
self.object = None
form_class = self.get_form_class()
form = form_class()
form.initial['data_movimento'] = datetime.today().strftime('%d/%m/%Y')
itens_form = ItensMovimentoFormSet(prefix='itens_form')
return self.render_to_response(self.get_context_data(form=form, itens_form=itens_form,))
def post(self, request, *args, **kwargs):
self.object = None
# Tirar . dos campos decimais
req_post = request.POST.copy()
for key in req_post:
if ('quantidade' in key or
'valor' in key or
'total' in key):
req_post[key] = req_post[key].replace('.', '')
request.POST = req_post
form_class = self.get_form_class()
form = self.get_form(form_class)
itens_form = ItensMovimentoFormSet(request.POST, prefix='itens_form')
if (form.is_valid() and itens_form.is_valid()):
self.object = form.save(commit=False)
lista_produtos = []
lista_produtos_estocados = []
itens_form.instance = self.object
for pform in itens_form:
if pform.cleaned_data != {}:
itens_mvmt_obj = pform.save(commit=False)
itens_mvmt_obj.movimento_id = self.object
self.adicionar_novo_movimento_estoque(
itens_mvmt_obj, pform, lista_produtos, lista_produtos_estocados)
# Verificar se movimentos de estoque invalidos existem
if len(pform.errors):
return self.form_invalid(form=form, itens_form=itens_form)
else:
self.object.save()
itens_form.save()
for prod in lista_produtos:
prod.save()
for prod_estocado in lista_produtos_estocados:
prod_estocado.save()
return self.form_valid(form)
return self.form_invalid(form=form, itens_form=itens_form)
class AdicionarEntradaEstoqueView(AdicionarMovimentoEstoqueBaseView):
form_class = EntradaEstoqueForm
template_name = "estoque/movimento/movimento_estoque_add.html"
success_url = reverse_lazy('estoque:listaentradasestoqueview')
success_message = "<b>Movimento de estoque de entrada nº%(pk)s</b> adicionado com sucesso."
def view_context(self, context):
context['title_complete'] = 'ADICIONAR ENTRADA EM ESTOQUE'
context['return_url'] = reverse_lazy(
'estoque:listaentradasestoqueview')
return context
class AdicionarSaidaEstoqueView(AdicionarMovimentoEstoqueBaseView):
form_class = SaidaEstoqueForm
template_name = "estoque/movimento/movimento_estoque_add.html"
success_url = reverse_lazy('estoque:listasaidasestoqueview')
success_message = "<b>Movimento de estoque de saída nº%(pk)s</b> adicionado com sucesso."
def view_context(self, context):
context['title_complete'] = 'ADICIONAR SAÍDA EM ESTOQUE'
context['return_url'] = reverse_lazy('estoque:listasaidasestoqueview')
return context
class AdicionarTransferenciaEstoqueView(AdicionarMovimentoEstoqueBaseView):
form_class = TransferenciaEstoqueForm
template_name = "estoque/movimento/movimento_estoque_add.html"
success_url = reverse_lazy('estoque:listatransferenciasestoqueview')
success_message = "<b>Movimento de estoque de transferência nº%(pk)s</b> adicionado com sucesso."
def view_context(self, context):
context['title_complete'] = 'ADICIONAR TRANSFERÊNCIA EM ESTOQUE'
context['return_url'] = reverse_lazy(
'estoque:listatransferenciasestoqueview')
return context
class MovimentoEstoqueBaseListView(CustomListView):
permission_codename = 'view_movimentoestoque'
def get_context_data(self, **kwargs):
context = super(MovimentoEstoqueBaseListView,
self).get_context_data(**kwargs)
return self.view_context(context)
class MovimentoEstoqueListView(MovimentoEstoqueBaseListView):
template_name = 'estoque/movimento/movimento_estoque_list.html'
context_object_name = 'all_movimentos'
success_url = reverse_lazy('estoque:listamovimentoestoqueview')
def view_context(self, context):
context['title_complete'] = 'TODAS AS MOVIMENTAÇÕES DE ESTOQUE'
return context
def get_queryset(self):
all_entradas = EntradaEstoque.objects.all()<|fim▁hole|> all_saidas = SaidaEstoque.objects.all()
all_transferencias = TransferenciaEstoque.objects.all()
all_movimentos = list(
chain(all_saidas, all_entradas, all_transferencias))
return all_movimentos
def post(self, request, *args, **kwargs):
if self.check_user_delete_permission(request, MovimentoEstoque):
for key, value in request.POST.items():
if value == "on":
if EntradaEstoque.objects.filter(id=key).exists():
instance = EntradaEstoque.objects.get(id=key)
elif SaidaEstoque.objects.filter(id=key).exists():
instance = SaidaEstoque.objects.get(id=key)
elif TransferenciaEstoque.objects.filter(id=key).exists():
instance = TransferenciaEstoque.objects.get(id=key)
instance.delete()
return redirect(self.success_url)
class EntradaEstoqueListView(MovimentoEstoqueBaseListView):
template_name = 'estoque/movimento/movimento_estoque_list.html'
model = EntradaEstoque
context_object_name = 'all_entradas'
success_url = reverse_lazy('estoque:listaentradasestoqueview')
def view_context(self, context):
context['title_complete'] = 'ENTRADAS EM ESTOQUE'
context['add_url'] = reverse_lazy('estoque:addentradaestoqueview')
return context
class SaidaEstoqueListView(MovimentoEstoqueBaseListView):
template_name = 'estoque/movimento/movimento_estoque_list.html'
model = SaidaEstoque
context_object_name = 'all_saidas'
success_url = reverse_lazy('estoque:listasaidasestoqueview')
def view_context(self, context):
context['title_complete'] = 'SAÍDAS EM ESTOQUE'
context['add_url'] = reverse_lazy('estoque:addsaidaestoqueview')
return context
class TransferenciaEstoqueListView(MovimentoEstoqueBaseListView):
template_name = 'estoque/movimento/movimento_estoque_list.html'
model = TransferenciaEstoque
context_object_name = 'all_transferencias'
success_url = reverse_lazy('estoque:listatransferenciasestoqueview')
def view_context(self, context):
context['title_complete'] = 'TRANSFERÊNCIAS EM ESTOQUE'
context['add_url'] = reverse_lazy(
'estoque:addtransferenciaestoqueview')
return context
class DetalharMovimentoEstoqueBaseView(CustomDetailView):
template_name = "estoque/movimento/movimento_estoque_detail.html"
permission_codename = 'view_movimentoestoque'
def get_context_data(self, **kwargs):
context = super(DetalharMovimentoEstoqueBaseView,
self).get_context_data(**kwargs)
return self.view_context(context)
class DetalharEntradaEstoqueView(DetalharMovimentoEstoqueBaseView):
model = EntradaEstoque
def view_context(self, context):
context['title_complete'] = 'MOVIMENTO DE ENTRADA EM ESTOQUE N°' + \
str(self.object.id)
context['return_url'] = reverse_lazy(
'estoque:listaentradasestoqueview')
return context
class DetalharSaidaEstoqueView(DetalharMovimentoEstoqueBaseView):
model = SaidaEstoque
def view_context(self, context):
context['title_complete'] = 'MOVIMENTO DE SAÍDA EM ESTOQUE N°' + \
str(self.object.id)
context['return_url'] = reverse_lazy('estoque:listasaidasestoqueview')
return context
class DetalharTransferenciaEstoqueView(DetalharMovimentoEstoqueBaseView):
model = TransferenciaEstoque
def view_context(self, context):
context['title_complete'] = 'MOVIMENTO DE TRANSFERÊNCIA EM ESTOQUE N°' + \
str(self.object.id)
context['return_url'] = reverse_lazy(
'estoque:listatransferenciasestoqueview')
return context<|fim▁end|> | |
<|file_name|>assets.js<|end_file_name|><|fim▁begin|>// This file is part of Warbirds BDA Script Generator.
// Foobar is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Warbirds BDA Script Generator is distributed in the hope that it will
// be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Foobar. If not, see <http://www.gnu.org/licenses/>.
// (c) Noflyz - United Mud Movers - noflyz.wix.com/united-mud-movers
(function() {
var assets = [{"name": "G0", "label" : "88Flack", "damage" : "500", "sdesc" : "0 otOT_88_FLACK Runway gun or 88 Flack", "required" : "N", "description" : "Large flak gun in sandbag ring."},
{"name": "G1", "label" : "MG", "damage" : "200", "sdesc" : "1 otOT_RUNWAY_GUN Aircraft factory gun", "required" : "N", "description" : "Twin MG position with man."},
{"name": "G2", "label" : "20mm", "damage" : "200", "sdesc" : "2 otOT_FUEL_FACTORY_GUN Fuel factory gun", "required" : "N", "description" : "Small flak position in dirt ring OR medium sandbag ring with MG and man." },
{"name": "G3", "label" : "40mm", "damage" : "300", "sdesc" : "3 otOT_MUNITION_FACTORY_GUN Munition factory gun", "required" : "N", "description" : "Medium flak postion in dirt ring OR large sandbag ring with wheeled gun." },
{"name": "G4", "label" : "HQGun", "damage" : "500", "sdesc" : "4 otOT_HQ_GUN HQ gun", "required" : "Y", "description" : "Camo Arty Ring Position with special significance." },
{"name": "HG", "label" : "Hanger", "damage" : "2500", "sdesc" : "5 otOT_HANGER Hangar", "required" : "Y", "description" : "Airfield Structures of various types and sizes to house aircraft." },
{"name": "RW", "label" : "Runway", "damage" : "20000", "sdesc" : "6 otOT_RUNWAY Runway", "required" : "N", "description" : "Ground texture representing base layer of airfields and towns." },
{"name": "HQ", "label" : "HQ", "damage" : "2000", "sdesc" : "7 otOT_HQ HQ (head quarters)", "required" : "Y", "description" : "Any building with special significance." },
{"name": "TW", "label" : "Tower", "damage" : "2000", "sdesc" : "8 otOT_TOWER Tower", "required" : "N", "description" : "Control object for each field of various types, needed for capture." },
{"name": "TX", "label" : "TaxiWay", "damage" : "8000", "sdesc" : "9 otOT_TAXY_WAY Taxi way", "required" : "N", "description" : "Ground texture representing a specific base area of airfields and towns." },
{"name": "BO", "label" : "Unit", "damage" : "100", "sdesc" : "10 otOT_BOMBABLE_OBJECT Bombable object", "required" : "N", "description" : "Any small unimportant structure not necessary for base closure." },
{"name": "BE", "label" : "BoatEntry", "damage" : "20000", "sdesc" : "11 otOT_BOAT_ENTRY Boat entry", "required" : "N", "description" : "Any very large structure not necessary for base closure." },
{"name": "CB", "label" : "Carrier", "damage" : "250", "sdesc" : "12 otOT_CARRIER Carrier", "required" : "Y", "description" : "Trucks and M5 HTs - carrier of supplies and troops." },
{"name": "BB", "label" : "Boat", "damage" : "1000", "sdesc" : "13 otOT_BOMBABLE_BOAT Bombable boat", "required" : "Y", "description" : "Vic56 small coastal boat" },
{"name": "ST", "label" : "Strat", "damage" : "10000", "sdesc" : "14 otOT_STRAT_OBJECT Strat object", "required" : "N", "description" : "Not in use - possible special strategic target" },
{"name": "RD", "label" : "RadarEm", "damage" : "2000", "sdesc" : "15 otOT_RADAR_EMITTER Radar emitter", "required" : "Y", "description" : "Square radar mast or circular listening dish - determines radar ingame." },
{"name": "C0", "label" : "ReadyRoom", "damage" : "1250", "sdesc" : "16 otOT_READY_ROOM Ready room", "required" : "Y", "description" : "Squadron assembly room at airfields" },
{"name": "C1", "label" : "Capital", "damage" : "2000", "sdesc" : "17 otOT_CAPITAL Capital", "required" : "Y", "description" : "DD1 or DD2 Destroyer - capital ship" },
{"name": "AS", "label" : "Asteroid", "damage" : "500", "sdesc" : "18 otOT_ASTERIOD Asteroid", "required" : "Y", "description" : "Barrage or Observation Balloons" },
{"name": "FM", "label" : "FacMod", "damage" : "4000", "sdesc" : "19 otOT_FACTORY_MODULE Factory module", "required" : "Y", "description" : "Various medium-sized associated factory buildings." },
{"name": "LM", "label" : "LivMod", "damage" : "2000", "sdesc" : "20 otOT_LIVING_MODULE Living module", "required" : "Y", "description" : "Various large housing developments at industry and urban areas." },
{"name": "DM", "label" : "Dock", "damage" : "1000", "sdesc" : "21 otOT_DOCKING_MODULE Docking module", "required" : "Y", "description" : "Wood dock or steel bridge span" },
{"name": "SM", "label" : "Struct", "damage" : "500", "sdesc" : "22 otOT_STRUCTURE_MODULE Structure module", "required" : "Y", "description" : "Any small structure necessary for base closure." },
{"name": "CM", "label" : "Cargo", "damage" : "1500", "sdesc" : "23 otOT_CARGO_MODULE Cargo module", "required" : "Y", "description" : "Freighter - cargo ship" },
{"name": "WM", "label" : "Weapon", "damage" : "750", "sdesc" : "24 otOT_WEAPONS_MODULE Weapons module", "required" : "Y", "description" : "Tanks, M16s or M3s - armoured weapons" },
{"name": "PL", "label" : "Planet", "damage" : "50000", "sdesc" : "25 otOT_PLANET Planet", "required" : "N", "description" : "Not in use - possible terrain object indestructable" },
{"name": "X?","label" : "Link", "damage" : "100", "sdesc" : "26 otOT_LINK Link", "required" : "N", "description" : "Not in use - possible communication link between fields" },
{"name": "PA", "label" : "ParkedAC", "damage" : "350", "sdesc" : "27 otOT_PARKED_AIRCRAFT Parked Aircraft", "required" : "Y", "description" : "Parked a/c types - various types specific to terrains." },
{"name": "A0", "label" : "Arty0", "damage" : "500", "sdesc" : "28 otOT_ARTILLERY0 Artillery Type 0", "required" : "Y", "description" : "Camo Arty Ring Position representing field artillery(up to 105mm)" },
{"name": "A1", "label" : "Arty1", "damage" : "700", "sdesc" : "29 otOT_ARTILLERY1 Artillery Type 1", "required" : "Y", "description" : "Camo Arty Ring Position representing heavy artillery(above 105mm)" },
{"name": "A2", "label" : "Arty2", "damage" : "1000", "sdesc" : "30 otOT_ARTILLERY2 Artillery Type 2", "required" : "Y", "description" : "Camo Arty Ring Position representing coastal artillery(above 200mm)" },
{"name": "FD", "label" : "FuelDmp", "damage" : "500", "sdesc" : "31 otOT_FUEL_DUMP Fuel Dump", "required" : "Y", "description" : "Small fuel tanks at airfields and other locations." },
{"name": "RS", "label" : "RadarStn", "damage" : "1500", "sdesc" : "32 otOT_RADAR_STATION Radar Station", "required" : "Y", "description" : "Radar control building with mast - does not determine radar ingame." },
{"name": "WH", "label" : "Warehouse", "damage" : "2000", "sdesc" : "33 otOT_WAREHOUSE Warehouse", "required" : "Y", "description" : "Various large buildings that store goods at all locations." },
{"name": "AD", "label" : "AmmoDump", "damage" : "1250", "sdesc" : "34 otOT_AMMO_DUMP Ammunition Dump", "required" : "Y", "description" : "Various fortified ammunition storage bunkers at airfields and tactical positions." },
{"name": "HT", "label" : "Hut", "damage" : "600", "sdesc" : "35 otOT_HUT Hut/Tent", "required" : "Y", "description" : "Tent and tent hangars." },
{"name": "HS", "label" : "House", "damage" : "800", "sdesc" : "36 otOT_HOUSE House", "required" : "Y", "description" : "Various small general structures at all locations." },
{"name": "RK", "label" : "Rock", "damage" : "500", "sdesc" : "37 otOT_ROCK Rock", "required" : "Y", "description" : "Stone bridge span OR similar stone structure like walls." },
{"name": "TR", "label" : "Tree", "damage" : "200", "sdesc" : "38 otOT_TREE Tree", "required" : "N", "description" : "Tree object - different than tree clutter objects" },
{"name": "B1", "label" : "Bridge", "damage" : "2500", "sdesc" : "39 otOT_BRIDGE", "required" : "N", "description" : "Port base object - TERRAIN SPECIFIC" },
{"name": "G5", "label" : "37mm-AT", "damage" : "300", "sdesc" : "40 otOT_ANTITANK_1", "required" : "Y", "description" : "Large sandbag ring with wheeled gun." },
{"name": "G6", "label" : "75mm-AT", "damage" : "400", "sdesc" : "41 otOT_ANTITANK_2", "required" : "Y" , "description" : "Camo Arty Ring Position representing large AT guns." },
{"name": "EA", "label" : "Factory", "damage" : "3000", "sdesc" : "42 otOT_FACTORY Factory", "required" : "Y", "description" : "Various large factory buildings at industry locations." },
{"name": "EB", "label" : "FactComplex", "damage" : "5000", "sdesc" : "43 otOT_FACTCOMPLEX FactComplex", "required" : "Y", "description" : "Very large factory building at industry locations." },
{"name": "EC", "label" : "FactAvionics", "damage" : "3000", "sdesc" : "44 otOT_FACTAVIONICS FactAvionics", "required" : "Y", "description" : "Large peaked factory building at industry locations." },
{"name": "ED", "label" : "FactBall1", "damage" : "3000", "sdesc" : "45 otOT_FACTBALL FactBall1", "required" : "Y", "description" : "Medium factory building at industry locations." },
{"name": "EE", "label" : "FactBall2", "damage" : "4000", "sdesc" : "46 otOT_FACTBALL2 FactBall2", "required" : "Y", "description" : "Large factory building at industry locations." },
{"name": "EF", "label" : "Warehouse2", "damage" : "3000", "sdesc" : "47 otOT_WAREHOUSE2 Warehouse2", "required" : "Y", "description" : "Various very large buildings that store goods at major locations." },
{"name": "EG", "label" : "Crane", "damage" : "1000", "sdesc" : "48 otOT_CRANE Crane", "required" : "Y", "description" : "Cranes at ports, industry and railway yards - two types." },
{"name": "EH", "label" : "Pontoon", "damage" : "300", "sdesc" : "49 otOT_PONTOON Pontoon", "required" : "Y", "description" : "Pontoon bridge span" },
{"name": "EI", "label" : "ReFinBuild1", "damage" : "4000", "sdesc" : "50 otOT_REFINBUILD1 ReFinBuild1", "required" : "Y", "description" : "Medium refinery building at industry locations." },
{"name": "EJ", "label" : "RefFinSep", "damage" : "700", "sdesc" : "51 otOT_REFINSTEP RefFinSep", "required" : "Y", "description" : "Oil industry refinery seperation tower." },
{"name": "EK", "label" : "ReFinTank", "damage" : "1250", "sdesc" : "52 otOT_REFINTANK ReFinTank", "required" : "Y", "description" : "Large fuel tanks at industry and port locations." },
{"name": "EL", "label" : "RefinPipe", "damage" : "700", "sdesc" : "53 otOT_REFINPIPE RefinPipe", "required" : "Y", "description" : "Industry smoke stack - all industry types" },
{"name": "EM", "label" : "Power1", "damage" : "1000", "sdesc" : "54 otOT_POWER1 Power1", "required" : "Y", "description" : "Small power generator building at industry locations." },
{"name": "EN", "label" : "Power2", "damage" : "1500", "sdesc" : "55 otOT_POWER2 Power2", "required" : "Y", "description" : "Medium power generator building at industry locations." },
{"name": "EO", "label" : "Power3", "damage" : "2000", "sdesc" : "56 otOT_POWER3 Power3", "required" : "Y", "description" : "Large power generator building at industry locations." },
{"name": "EP", "label" : "Uboat", "damage" : "1000", "sdesc" : "57 otOT_UBOAT Uboat", "required" : "Y", "description" : "Not in use - possible submarine target" },
{"name": "EQ", "label" : "Gas1", "damage" : "750", "sdesc" : "58 otOT_GAS1 Gas1", "required" : "Y", "description" : "Medium sized fuel tanks at industry locations." },
{"name": "ER", "label" : "Subpen", "damage" : "3000", "sdesc" : "59 otOT_SUBPEN Subpen", "required" : "Y", "description" : "Large concrete sub hangars at ports OR dam sections across rivers." },
{"name": "ES", "label" : "ObsCnCr", "damage" : "1000", "sdesc" : "60 otOT_OBSCNCR ObsCnCr", "required" : "Y", "description" : "Observation Control Center - control bunker in fortified defenses." },
{"name": "ET", "label" : "Barracks01", "damage" : "2000", "sdesc" : "61 otOT_BARRACKS01 Barracks01", "required" : "Y", "description" : "Various large buildings that house troops at airfields, bases and ports." },
{"name": "EU", "label" : "Bunker", "damage" : "1500", "sdesc" : "62 otOT_BUNKER Bunker", "required" : "Y", "description" : "Various small fortified buildings at airfields or tactical postions." },
{"name": "EV", "label" : "LightTower", "damage" : "1000", "sdesc" : "63 otOT_LIGHTTOWER LightTower", "required" : "Y", "description" : "Coastal lighthouse - on coastlines and at seaports." },
{"name": "EW", "label" : "CntrlRail", "damage" : "1500", "sdesc" : "64 otOT_CNTRLRAIL CntrlRail", "required" : "Y", "description" : "Railway platform for passenger trains." },
{"name": "EX", "label" : "MLine1", "damage" : "2000", "sdesc" : "65 otOT_MLINE1 MLine1", "required" : "Y", "description" : "Small concrete blockhouse with MG turrets - in fortified areas." },
{"name": "EY", "label" : "MLine2", "damage" : "3000", "sdesc" : "66 otOT_MLINE2 MLine2", "required" : "Y", "description" : "Large concrete blockhouse with gun turrets - in fortified areas." },
{"name": "G7", "label" : "DOAFlack", "damage" : "500", "sdesc" : "67 otOT_DOA_FLACK", "required" : "N", "description" : "DOA specific" },
{"name": "G8", "label" : "BritRifle", "damage" : "10", "sdesc" : "68 otOT_BRITRIFLE", "required" : "N", "description" : "DOA specific" },
{"name": "G9", "label" : "GermRifle", "damage" : "10", "sdesc" : "69 otOT_GERMRIFLE", "required" : "N", "description" : "DOA specific" },
{"name": "GA", "label" : "TrenchMG", "damage" : "200", "sdesc" : "70 otOT_TRENCHMG", "required" : "N", "description" : "DOA specific" },
{"name": "GB", "label" : "AAAMG", "damage" : "200", "sdesc" : "71 otOT_AAAMG", "required" : "N", "description" : "DOA specific"}];
var app = angular.module('assets', []);
app.directive('assetList', function() {
var o= { restrict: 'E',
templateUrl: 'asset-list.html',
controller:
function() {
var ctrl = this;
this.assets = assets;
this.sel = assets[0];
this.dtf = "%%\n%%\n";
this.selNo = 1;
this.startNo = 1;
this.fieldNum = 1;
this.numbers = [1,2,3,4,5,6,7,8,9,10];
this.needed = {};
this.get_asset = function(k) {
for (var i = 0; i < this.assets.length; ++i) {
if (this.assets[i].name === k) {
return this.assets[i];
}
}
return null;
};
this.add_asset = function() {
this.needed[this.sel.name] = [this.sel.description, this.startNo, this.selNo];
this.gen_dtf();
};
this.remove_asset = function(k) {
console.log(k);
delete this.needed[k];
this.gen_dtf();
};
this.gen_dtf = function() {
Number.prototype.pad = function(size) {
var s = String(this);
while (s.length < (size || 2)) {s = "0" + s;}
return s;
};
this.dtf = "%%\n";
// Initialize variables<|fim▁hole|>
// Generate counts
for (var k in this.needed) {
var o = this.needed[k];
var s = o[1];
var e = o[2];
for (var i = s; i <= e; ++i) {
this.dtf += "if (DESTROYED(GROUNDOBJECT(\"F" +
this.fieldNum.pad(3) + k + i.pad(3)
+ "\")))\n{\n}\nelse\n{\n"
+ ".intadd " + k + "_count 1\n}\n";
}
};
// Print Report
for (var k in this.needed) {
console.log(k);
this.dtf += ".echo @" + k + "_count@ - "
+ this.get_asset(k).description + "\n";
};
// Free the variables
for (var k in this.needed) {
this.dtf += ".varfree " + k + "_count\n";
};
this.dtf += "%%\n";
};
},
controllerAs: 'assetCtrl'
};
return o;
});
})();<|fim▁end|> | for (var k in this.needed) {
this.dtf += ".intsto " + k + "_count 0\n";
}; |
<|file_name|>views.js<|end_file_name|><|fim▁begin|>module.exports = function(fancyRequire) {<|fim▁hole|><|fim▁end|> | fancyRequire('merchant_row');
}; |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
########################################################################
# File based on https://github.com/Blosc/bcolz
########################################################################
#
# License: BSD
# Created: October 5, 2015
# Author: Carst Vaartjes - [email protected]
#
########################################################################
import codecs
import os
from setuptools import setup, Extension, find_packages
from os.path import abspath
from sys import version_info as v
from setuptools.command.build_ext import build_ext as _build_ext
# Check this Python version is supported
if any([v < (2, 6), (3,) < v < (3, 5)]):
raise Exception("Unsupported Python version %d.%d. Requires Python >= 2.7 "
"or >= 3.5." % v[:2])
class build_ext(_build_ext):
def finalize_options(self):
_build_ext.finalize_options(self)
# Prevent numpy from thinking it is still in its setup process:
__builtins__.__NUMPY_SETUP__ = False
import numpy
self.include_dirs.append(numpy.get_include())
HERE = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
"""
Build an absolute path from *parts* and and return the contents of the
resulting file. Assume UTF-8 encoding.
"""
with codecs.open(os.path.join(HERE, *parts), "rb", "utf-8") as f:
return f.read()
def get_version():<|fim▁hole|> with open("bqueryd/version.py") as fp:
exec (fp.read(), version)
return version
# Sources & libraries
inc_dirs = [abspath('bqueryd')]
try:
import numpy as np
inc_dirs.append(np.get_include())
except ImportError as e:
pass
lib_dirs = []
libs = []
def_macros = []
sources = []
cmdclass = {'build_ext': build_ext}
optional_libs = ['numexpr>=2.6.9']
install_requires = [
'bquery>=0.2.10',
'pyzmq>=17.1.2',
'redis>=3.0.1',
'boto3>=1.9.82',
'smart_open>=1.9.0',
'netifaces>=0.10.9',
'configobj>=5.0.6',
'psutil>=5.0.0',
'azure-storage-blob==12.0.0',
]
setup_requires = []
tests_requires = [
'pandas>=0.23.1',
'pytest>=4.0.0',
'pytest-cov>=2.6.0',
'codacy-coverage>=1.3.7',
]
extras_requires = []
ext_modules = []
package_data = {}
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Operating System :: Microsoft :: Windows',
'Operating System :: Unix',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
setup(
name="bqueryd",
version=get_version()['__version__'],
description='A distribution framework for Bquery',
long_description=read("README.md"),
long_description_content_type='text/markdown',
classifiers=classifiers,
author='Carst Vaartjes',
author_email='[email protected]',
maintainer='Carst Vaartjes',
maintainer_email='[email protected]',
url='https://github.com/visualfabriq/bqueryd',
license='GPL2',
platforms=['any'],
ext_modules=ext_modules,
cmdclass=cmdclass,
install_requires=install_requires,
setup_requires=setup_requires,
tests_require=tests_requires,
extras_require=dict(
optional=extras_requires,
test=tests_requires
),
packages=find_packages(),
package_data=package_data,
include_package_data=True,
zip_safe=True,
entry_points={
'console_scripts': [
'bqueryd = bqueryd.node:main'
]
}
)<|fim▁end|> | version = {} |
<|file_name|>livepush.py<|end_file_name|><|fim▁begin|>import time
import sqlalchemy as SA
import pushmanager.core.db as db
import pushmanager.core.util
from pushmanager.core.mail import MailQueue
from pushmanager.core.rb import RBQueue
from pushmanager.core.requesthandler import RequestHandler
class LivePushServlet(RequestHandler):
def _arg(self, key):
return pushmanager.core.util.get_str_arg(self.request, key, '')
def post(self):
if not self.current_user:
return self.send_error(403)
self.pushid = pushmanager.core.util.get_int_arg(self.request, 'id')
push_query = db.push_pushes.update().where(db.push_pushes.c.id == self.pushid).values({
'state': 'live',
'modified': time.time(),
})
request_query = db.push_requests.update().where(SA.and_(
db.push_requests.c.state == 'blessed',
SA.exists(
[1],
SA.and_(
db.push_pushcontents.c.push == self.pushid,
db.push_pushcontents.c.request == db.push_requests.c.id,
)
))).values({
'state': 'live',
'modified': time.time(),
})
reset_query = db.push_requests.update().where(
SA.exists(
[1],
SA.and_(
db.push_requests.c.state == 'pickme',
db.push_pushcontents.c.push == self.pushid,
db.push_pushcontents.c.request == db.push_requests.c.id,
)
)).values({
'state': 'requested',
})
delete_query = db.push_pushcontents.delete().where(
SA.exists([1], SA.and_(
db.push_pushcontents.c.push == self.pushid,
db.push_pushcontents.c.request == db.push_requests.c.id,
db.push_requests.c.state == 'requested',
)))
live_query = db.push_requests.select().where(
SA.and_(db.push_requests.c.state == 'live',
db.push_pushcontents.c.push == self.pushid,
db.push_pushcontents.c.request == db.push_requests.c.id)
)
db.execute_transaction_cb(
[push_query, request_query, reset_query, delete_query, live_query],
self.on_db_complete,
)
def on_db_complete(self, success, db_results):
self.check_db_results(success, db_results)
_, _, _, _, live_requests = db_results
for req in live_requests:
if req['reviewid']:
review_id = int(req['reviewid'])
RBQueue.enqueue_review(review_id)
if req['watchers']:
user_string = '%s (%s)' % (req['user'], req['watchers'])
users = [req['user']] + req['watchers'].split(',')
else:
user_string = req['user']
users = [req['user']]
msg = (
"""<|fim▁hole|> %(pushmaster)s has certified request for %(user)s as stable in production:
</p>
<p>
<strong>%(user)s - %(title)s</strong><br />
<em>%(repo)s/%(branch)s</em>
</p>
<p>
Regards,<br />
PushManager
</p>"""
) % pushmanager.core.util.EscapedDict({
'pushmaster': self.current_user,
'user': user_string,
'title': req['title'],
'repo': req['repo'],
'branch': req['branch'],
})
subject = "[push] %s - %s" % (user_string, req['title'])
MailQueue.enqueue_user_email(users, msg, subject)<|fim▁end|> | <p> |
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 2.2.5 on 2019-09-25 17:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('testproducts', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ProductRequest',<|fim▁hole|> fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_date', models.DateTimeField(auto_now_add=True)),
('email', models.EmailField(blank=True, help_text='Optional email of the customer who made the request', max_length=254, null=True)),
('variant', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='requests', to='testproducts.ProductVariant')),
],
),
]<|fim▁end|> | |
<|file_name|>settings.component.spec.ts<|end_file_name|><|fim▁begin|>import { TestBed, ComponentFixture } from '@angular/core/testing';
import { RouterTestingModule } from '@angular/router/testing';
import { HttpClientTestingModule } from '@angular/common/http/testing';
import { FormsModule } from '@angular/forms';
import { Title } from '@angular/platform-browser';
import { SettingsComponent } from 'src/app/settings/settings.component';
import { SettingsModule } from 'src/app/settings/settings.module';
import { SettingsService } from 'src/app/settings/settings.service';
import { StringsService } from 'src/app/shared/services';
describe('Settings', () => {
let component: SettingsComponent;
let fixture: ComponentFixture<SettingsComponent>;
<|fim▁hole|> TestBed.configureTestingModule({
imports: [
RouterTestingModule,
HttpClientTestingModule,
FormsModule,
SettingsModule
],
providers: [
Title,
SettingsService,
{
provide: StringsService,
useValue: {
stringsChanged: {
subscribe: (fn: any) => {
fn({ settings: 'Settings' });
return { unsubscribe: () => {} };
}
}
}
}
]
}).compileComponents();
});
beforeEach(() => {
fixture = TestBed.createComponent(SettingsComponent);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('sets the title when constructed', () => {
expect((component as any).title.getTitle()).toEqual('TaskBoard - Settings');
});
});<|fim▁end|> | beforeEach(() => { |
<|file_name|>gp-stats.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
import { HttpClient, HttpHeaders } from '@angular/common/http';
import { Observable, of } from 'rxjs';
import {catchError, map, shareReplay, tap} from 'rxjs/operators';
import {GroupProductionStats} from './gp-stats';
const CACHE_SIZE = 1;
@Injectable({
providedIn: 'root'
})
export class GPStatsService {
private gpStatsUrl = '/gpdeletion/gpstats';
private gpLastUpdateTimeUrl = '/gpdeletion/last_update_time_group_production';
private cache$: Observable<GroupProductionStats[]>;
constructor(
private http: HttpClient){}
getGPStats(): Observable<GroupProductionStats[]> {
if (!this.cache$) {
this.cache$ = this.requestGPStats().pipe(
shareReplay(CACHE_SIZE)
);
}
return this.cache$;
}
private requestGPStats(): Observable<GroupProductionStats[]> {
return this.http.get<GroupProductionStats[]>(this.gpStatsUrl)
.pipe(
tap(_ => this.log(`fetched stats `)),
catchError(this.handleError<GroupProductionStats[]>('getGPStats', []))
);<|fim▁hole|> }
GPLastUpdateTime(): Observable<string> {
return this.http.get<string>(this.gpLastUpdateTimeUrl)
.pipe(
tap(_ => this.log(`fetched update time `)),
catchError(this.handleError<string>('GPLastUpdateTime', ''))
);
}
private handleError<T>(operation = 'operation', result?: T) {
return (error: any): Observable<T> => {
// TODO: send the error to remote logging infrastructure
console.error(error); // log to console instead
// TODO: better job of transforming error for user consumption
this.log(`${operation} failed: ${error.message}`);
// Let the app keep running by returning an empty result.
return of(result as T);
};
}
private log(message: string) {
console.log(`GPStatsService: ${message}`);
}
}<|fim▁end|> | |
<|file_name|>timeside-create-admin-user.py<|end_file_name|><|fim▁begin|>from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
class Command(BaseCommand):
help = """Create a default admin user if it doesn't exist.
you SHOULD change the password, the email and the token afterwards!"""
username = 'admin'
password = 'admin'
email = '[email protected]'
def handle(self, *args, **options):
verbosity = options.get('verbosity')
admin = User.objects.filter(username=self.username)
if not admin:
user = User.objects.create_user(username=self.username,<|fim▁hole|> user.is_staff = True
user.save()
if verbosity:
print('User "%s" created"' % self.username)
if Token.objects.get(user=user):
print('Token created for User "%s"' % self.username)<|fim▁end|> | email=self.email,
password=self.password)
user.is_superuser = True |
<|file_name|>d9.js<|end_file_name|><|fim▁begin|><|fim▁hole|>file:/home/charlike/dev/glob-fs/fixtures/a/d9.js<|fim▁end|> | |
<|file_name|>RSSParser.java<|end_file_name|><|fim▁begin|>/*
* PHEX - The pure-java Gnutella-servent.
* Copyright (C) 2001 - 2006 Arne Babenhauserheide ( arne_bab <at> web <dot> de )
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* Created on 08.02.2005
* --- CVS Information ---
* $Id: RSSParser.java 3682 2007-01-09 15:32:14Z gregork $
*/
package phex.util;
<|fim▁hole|>import java.io.PushbackReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class RSSParser {
/** This Class reads out RSS-files passed to it and
* collects them in the array "magnets[]".
* Check the usage in phex.gui.dialogs.NewDowloadDialog
*/
/**
* List of possible EOL characters, not exactly according to YAML 4.1.4
*/
private static final String EOL_CHARACTERS = "\r\n";
private static final char[] AMPERSAND_AMP = new char[]
{'a', 'm', 'p', ';'};
private static final String START_OF_ELEMENT_CHAR = "<";
private static final String END_OF_ELEMENT_CHAR = ">";
private static final String END_OF_ELEMENT_CHARN = "/";
private static final char[] XML_LINE = new char[]
{'<', '?', 'x', 'm', 'l'};
private static final char[] MAGNET_PREFIX = new char[]
{'m', 'a', 'g', 'n', 'e', 't'};
private static final char[] HTTP_PREFIX = new char[]
{'h', 't', 't', 'p', ':', '/'};
private static final char[] MAGNET_TAG = new char[]
{'<', 'm', 'a', 'g', 'n', 'e', 't', '>'};
private static final char[] ENCLOSURE_TAG_START = new char[]
{'<', 'e', 'n', 'c', 'l', 'o', 's', 'u'};
private static final char[] ENCLOSURE_TAG_MID = new char[]
{'r', 'e'};
private static final char[] URL_IDENTIFIER = new char[]
{'u', 'r', 'l', '=', '"'}; //"
private static final char[] ITEM_ELEMENT = new char[]
{'<', 'i', 't', 'e', 'm', '>',};
private static final char[] END_OF_ITEM_ELEMENT = new char[]
{'<', '/', 'i', 't', 'e', 'm', '>',};
private static final char[] RSS_TAG = new char[]
{'<', 'r', 's', 's', '>',};
private static final char[] END_OF_RSS_TAG = new char[]
{'<', '/', 'r', 's', 's', '>',};
private final PushbackReader reader;
private final List<String> magnets;
public RSSParser(Reader reader) {
magnets = new ArrayList<String>();
this.reader = new PushbackReader(reader, 6);
}
public void start()
throws IOException {
try {
/* The FileReader checks, if the File begins with "#MAGMA"
* and sends the characters following the "#MAGMA" to the
* listFinder.
*/
char buff[] = new char[5];
int readBytes = 0;
while (readBytes != 5) {
int count = reader.read(buff, readBytes, 5);
if (count == -1) {
throw new IOException("Input file is no XML-File ("
+ String.valueOf(buff) + ").");
}
readBytes += count;
}
if (Arrays.equals(buff, XML_LINE)) {
parseXml();
}
} finally {
reader.close();
}
}
public List getMagnets() {
// Can be called to get the included magnets
return magnets;
}
private void parseXml()
throws IOException {
int pos = 0;
int c;
while (true) {
c = reader.read();
if (c == RSS_TAG[pos]) {
pos++;
if (pos == RSS_TAG.length) {
// found rss-tag.. find the first item.
parseList();
pos = 0;
}
} else if (c == -1) {
// reached the end...
return;
} else {// next char of rss tag not found... skip line...
pos = 0;
//skipToEndOfObject();
parseList(); // ignore that this is careless
}
}
}
private void parseList()
throws IOException {
int pos = 0;
int c;
while (true) {
c = reader.read();
if (c == ITEM_ELEMENT[pos]) {
pos++;
if (pos == ITEM_ELEMENT.length) {
// found list: element.. skip line and continue to parse body.
parseItemBody();
pos = 0;
}
} else if (c == END_OF_RSS_TAG[pos]) {
pos++;
if (pos == END_OF_RSS_TAG.length) {
// RSS_TAG ended.
pos = 0;
return;
}
} else if (c == -1) {
// reached the end...
return;
} else {// next char of list element not found... skip line...
pos = 0;
}
}
}
public void parseItemBody()
throws IOException {
int c;
int pos = 0;
while (true) {
c = reader.read();
if (c == MAGNET_TAG[pos]) {
pos++;
if (pos == MAGNET_TAG.length) {
// we found a magnet-element
// pre check if this really is a magnet..
char buff[] = new char[6];
int readBytes = 0;
while (readBytes != 6) {
int count = reader.read(buff, readBytes, 6);
if (count == -1) {
return;
}
readBytes += count;
}
reader.unread(buff);
if (Arrays.equals(buff, MAGNET_PREFIX)) {
// reached quoted magnet
pos = 0;
parseMagnet();
} else if (Arrays.equals(buff, HTTP_PREFIX)) {
// reached quoted magnet
pos = 0;
parseMagnet();
} else {
// skip to the end of this magnet-tag,
// it doesn't contain a magnet nor a http-uri.
}
pos = 0;
}
}
/**
* Code to read out enclosures with
* http- or magnet-uris doesn't work yet.
*/
else if (c == ENCLOSURE_TAG_START[pos]) {
pos++;
if (pos == ENCLOSURE_TAG_START.length) {
// we found an enclosure-tag
// pre check if this contains a magnet or http-url..
pos = 0;
while (true) {
c = reader.read();
//go forward up to the end of the URL-identifier.
if (c == URL_IDENTIFIER[pos]) {
pos++;
if (pos == URL_IDENTIFIER.length) { //this containis an url-identifier.
// check for magnet or http-start.
char buff[] = new char[6];
int readBytes = 0;
while (readBytes != 6) {
int count = reader.read(buff, readBytes, 6);
if (count == -1) {
return;
}
readBytes += count;
}
reader.unread(buff);
if (Arrays.equals(buff, MAGNET_PREFIX)) {
// reached quoted magnet
pos = 0;
parseMagnet();
break;
} else if (Arrays.equals(buff, HTTP_PREFIX)) {
// reached quoted http-url
pos = 0;
parseMagnet();
break;
}
}
} else if (END_OF_ELEMENT_CHAR.indexOf(c) != -1) { //return if we reached the end of the enclosure.
pos = 0;
break;
} else if (c == -1) {
pos = 0;
return;
} else {
pos = 0;
}
} // end of inner while (true)
} else // pos != ENCLOSURE_TAG_START.length
{
// next letter
}
} else if (c == -1) {
// reached the EOF
pos = 0;
return;
}
/**
* Commented it out, because it creaded an Array Out of Bounds error
* ToDo: Catch the Error and read out the titles of the items to use them along the magnets.
* ToDo: Read out the content of the rss-items, so they can be shown alongside the magnet in the list (best in a tooltip).
*/
else if (pos <= 6 && c == END_OF_ITEM_ELEMENT[pos]) {
pos++;
if (pos == END_OF_ITEM_ELEMENT.length) {
// the item ended.
pos = 0;
return;
}
}
/*
**/
else {
pos = 0; // didn't continue as magnet or enclosure tag, reset pos.
}
} //end of of while-loop
}
public void parseMagnet()
throws IOException {
StringBuffer magnetBuf = new StringBuffer();
int c;
while (true) {
c = reader.read();
if (c == ' ' || EOL_CHARACTERS.indexOf(c) != -1) {// skip all line folding characters.. and all spaces
continue;
} else if (c == '<') {// found the end of the magnet.
break;
}
/**
* only necessary when we are able to read out enclosures.
*/
else if (c == '"') //"
{ // found the end of the magnet.
break;
} else if (c == -1) {
// unexpected end...
return;
} else if (c == '&') {
char buff[] = new char[4];
int readBytes = 0;
while (readBytes != 4) {
int count = reader.read(buff, readBytes, 4);
if (count == -1) {
return;
}
readBytes += count;
}
if (Arrays.equals(buff, AMPERSAND_AMP)) {
// reached quoted magnet
magnetBuf.append('&');
} else {
reader.unread(buff);
magnetBuf.append((char) c);
}
} else {
magnetBuf.append((char) c);
}
}
magnets.add(magnetBuf.toString());
}
/**
* Skips all content till end of line.
*/
private void skipToEndOfObject() throws IOException {
// Only gets the next ending of any object. Could be improved to get
// the end of a specific object supplied by the calling funktion.
// At the moment ends either with "/>" or with "</"
int c;
while (true) {
c = reader.read();
if (c < 0) {// stream ended... a valid line could not be read... return
return;
} else if (START_OF_ELEMENT_CHAR.indexOf(c) != -1) {// we found a possble end o the object... check if there are followups
c = reader.read();
if (END_OF_ELEMENT_CHARN.indexOf(c) != -1) {
return;
} else {
// the last character was no End of Element char... push it back
reader.unread(c);
}
} else if (END_OF_ELEMENT_CHARN.indexOf(c) != -1) {// we found a possble end o the object... check if there are followups
c = reader.read();
if (END_OF_ELEMENT_CHAR.indexOf(c) != -1) {
return;
} else {
// the last character was no End of Element char... push it back
reader.unread(c);
}
}
}
}
}<|fim▁end|> | import java.io.IOException; |
<|file_name|>signals.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//#![feature(macro_rules)]
//#![macro_escape]
#![allow(unused_imports)]
#![allow(unused_variables)]
use std::mem::transmute;
use libc::c_void;
use gtk::ffi;
use gdk;
use glib;
use gtk;
use cairo;
use std::any::Any;
pub trait Signal<'a>{
fn get_signal_name(&self) -> &str;
fn get_trampoline(&self) -> extern "C" fn();
fn fetch_cb(&self) -> *mut ||;
fn get_user_data<'b>(&'b self) -> &'b Option<Box<Any + 'a>>;
}
// The defintion of the signal macro is split in a argumentless and
// one-with-arguments case because of a limitation in the rust macro
// system.
macro_rules! signal(
//Signals without arguments
($signal:ident, $class:ident () -> $ret_type:ty) => (
//General case (see below)
signal!($signal, $class [] -> $ret_type)
mod $signal{
use std::mem::transmute;
use libc::c_void;
use gtk::ffi;
use gdk;
use glib;
use gtk;
use cairo;
use std::any::Any;
pub extern fn trampoline(widget : *mut ffi::C_GtkWidget, signal: *mut Box<super::Signal>) -> $ret_type{
unsafe{
match (*signal).get_user_data(){
&Some(ref user_data) => {
let cb : *mut |*const c_void| -> $ret_type = transmute((*signal).fetch_cb());
(*cb)(::std::mem::transmute(user_data))
},
&None => {
let cb : *mut || -> $ret_type = transmute((*signal).fetch_cb());
(*cb)()
},
}
}
}
}
);
//Signals with arguments
($signal:ident, $class:ident ( $($arg_name:ident : $arg_type:ty),* ) -> $ret_type:ty) => (
//General case (see below)
signal!($signal, $class [$(($arg_name : $arg_type)),*] -> $ret_type)
mod $signal{
use std::mem::transmute;
use libc::c_void;
use gtk::ffi;
use gdk;
use glib;
use gtk;
use cairo;
use std::any::Any;
pub extern fn trampoline(widget : *mut ffi::C_GtkWidget, $($arg_name : $arg_type),* , signal: *mut Box<super::Signal>) -> $ret_type{
unsafe{
match (*signal).get_user_data(){
&Some(ref user_data) => {
let cb : *mut |$($arg_type),*, *const c_void| -> $ret_type = transmute((*signal).fetch_cb());
(*cb)($($arg_name),*, ::std::mem::transmute(user_data))
},
&None => {
let cb : *mut |$($arg_type),*| -> $ret_type = transmute((*signal).fetch_cb());
(*cb)($($arg_name),*)
},
}
}
}
}
);
//TODO custom trampoline
($signal:ident, $class:ident ( $($arg_name:ident : $arg_type:ty),* ) -> $ret_type:ty,
trampoline ( $($t_arg_nm:ident : $t_arg_ty:ty),* ) -> $t_ret_ty:ty $t_blck:expr) => (
//General case (see below)
signal!($signal, $class [$(($arg_name : $arg_type)),*] -> $ret_type)
mod $signal{
#[allow(unused_imports)]
#[allow(unused_variables)]
use std::mem::transmute;
use libc::c_void;
use gtk::ffi;
use gdk;
use glib;
use gtk;
use cairo;
use std::any::Any;
pub extern fn trampoline(widget: *mut ffi::C_GtkWidget, $($t_arg_nm : $t_arg_ty),* , signal: *mut Box<super::Signal>) -> $t_ret_ty{
unsafe{
match (*signal).get_user_data(){
&Some(ref user_data) => {
let cb: *mut |$($arg_type),*, *const c_void| -> $ret_type = transmute((*signal).fetch_cb());
let cont = |$($arg_name: $arg_type),*| {
(*cb)($($arg_name),* , ::std::mem::transmute(user_data))
};
let custom_trampoline = $t_blck;
custom_trampoline(cont)
},
&None => {
let cb : *mut |$($arg_type),*| -> $ret_type = transmute((*signal).fetch_cb());
let cont = |$($arg_name: $arg_type),*| {
(*cb)($($arg_name),*)
};
let custom_trampoline = $t_blck;
custom_trampoline(cont)
},
}
}
}
}
);
//General case
($signal:ident, $class:ident [ $(($arg_name:ident : $arg_type:ty)),* ] -> $ret_type:ty) => (
pub struct $class<'a>{
pub cb: |$($arg_type),*|:'a -> $ret_type,
pub user_data: Option<Box<Any + 'a>>
}
impl<'a> $class<'a>{
pub fn new (cb : |$($arg_type),* |:'a -> $ret_type) -> Box<$class<'a>> {
box $class{
cb: cb,
user_data: None
}
}
//TODO: Rust lexer bug here, can't parse the middel `,` in `|$($arg_type),* , Box<Any>|`
/*pub fn new_with_data (user_data: Box<Any>, cb: |$($arg_type),*, Box<Any>|:'a -> $ret_type) -> Box<Signal<'a>> {
box $class{
cb: cb,
user_data: user_data
} as Box<Signal<'a>>
}*/
}
impl<'a> Signal<'a> for $class<'a>{
fn get_signal_name(&self) -> &str {
stringify!($signal)
}
fn get_trampoline(&self) -> extern "C" fn(){
unsafe{
transmute(self::$signal::trampoline)
}
}
fn fetch_cb(&self) -> *mut ||{
unsafe{
transmute(&self.cb)
}
}
fn get_user_data<'b>(&'b self) -> &'b Option<Box<Any + 'a>>{
&self.user_data
}
}
);
)
//GObject
//https://developer.gnome.org/gobject/unstable/gobject-The-Base-Object-Type.html#gobject-The-Base-Object-Type.signals
signal!(notify, Notify(g_param_spec:c_void) -> ())
//GtkWidget
//https://developer.gnome.org/gtk3/stable/GtkWidget.html#GtkWidget.signals
signal!(accel_closures_changed, AccelClosuresChanged(spec : glib::ParamSpec) -> ())
signal!(can_activate_accel, CanActivateAccel(signal_id:uint) -> bool)
signal!(child_notify, ChildNotify(spec : glib::ParamSpec) -> ())
signal!(composited_changed, CompositedChanged() -> ())
signal!(destroy, Destroy() -> ())
signal!(direction_changed, DirectionChanged(previous_direction: gtk::TextDirection) -> ())
signal!(draw, Draw(ctx: cairo::Context) -> (), trampoline(ctx_raw: *mut cairo::ffi::cairo_t) -> () |cb: |cairo::Context|| {
cb(cairo::Context::wrap(ctx_raw))
})
signal!(focus, Focus(direction : gtk::DirectionType) -> bool)
signal!(grab_focus, GrabFocus() -> ())
signal!(grab_notify, GrabNotify(was_grabbed : bool) -> ())
signal!(hide, Hide() -> ())
signal!(keynav_failed, KeynavFailed(direction : gtk::DirectionType) -> bool)
signal!(map, Map() -> ())
signal!(mnemonic_activate, MnemonicActivate(arg : bool) -> bool)
signal!(move_focus, MoveFocus(direction : gtk::DirectionType) -> ())
signal!(popup_menu, PopupMenu() -> bool)
signal!(query_tooltip, QueryTooltip(x:int, y:int, keyboard_mode:bool, tooltip : *mut gtk::Tooltip) -> bool)
signal!(realize, Realize() -> ())
signal!(screen_changed, ScreenChanged(previous_screen : *mut gdk::Screen) -> ())
signal!(show, Show() -> ())
signal!(show_help, ShowHelp(help_type : gtk::WidgetHelpType) -> bool)
signal!(size_allocate, SizeAllocate(allocation : *mut gdk::Rectangle) -> ())
signal!(state_chagned, StateChagned(state : gtk::StateType) -> ())
signal!(state_flags_changed, StateFlagsChanged(flags : gtk::StateFlags) -> ())
signal!(style_updated, StyleUpdated() -> ())
signal!(unmap, Unmap() -> ())
signal!(unrealize, Unrealize() -> ())
/*
signal!(hierarchy_changed, HierarchyChanged(previous_toplevel : *gtk::Widget) -> (),
trampoline (previous_toplevel : *mut ffi::C_GtkWidget) -> Box<gtk::Widget> {
cb(previous_toplevel)
}
)
signal!(parent_set, ParentSet(old_parent : *gtk::Widget) -> ())
*/
//GtkWidget: GDK events
signal!(button_press_event, ButtonPressEvent(event : *mut gdk::EventButton) -> bool)
signal!(button_release_event, ButtonReleaseEvent(event : *mut gdk::EventButton) -> bool)
signal!(configure_event, ConfigureEvent(event : *mut gdk::EventConfigure) -> bool)
signal!(damage_event, DamageEvent(event : *mut gdk::EventExpose) -> bool)
signal!(delete_event, DeleteEvent(event : *mut gdk::EventAny) -> bool)
signal!(destroy_event, DestroyEvent(event : *mut gdk::EventAny) -> bool)
signal!(enter_notify_event, EnterNotifyEvent(event : *mut gdk::EventCrossing) -> bool)
signal!(leave_notify_event, LeaveNotifyEvent(event : *mut gdk::EventCrossing) -> bool)
signal!(event, Event(event : *mut gdk::EventAny) -> bool)
signal!(event_after, EventAfter(event : *mut gdk::EventAny) -> bool)
signal!(focus_in_event, FocusInEvent(event : *mut gdk::EventFocus) -> bool)
signal!(focus_out_event, FocusOutEvent(event : *mut gdk::EventFocus) -> bool)
signal!(grab_broken_event, GrabBrokenEvent(event : *mut gdk::EventGrabBroken) -> bool)
signal!(key_press_event, KeyPressEvent(event : *mut gdk::EventKey) -> bool)
signal!(key_release_event, KeyReleaseEvent(event : *mut gdk::EventKey) -> bool)
signal!(map_event, MapEvent(event : *mut gdk::EventAny) -> bool)
signal!(motion_notify_event, MotionNotifyEvent(event : *mut gdk::EventMotion) -> bool)
signal!(property_notify_event, PropertyNotifyEvent(event : *mut gdk::EventProperty) -> bool)
signal!(proximity_in_event, ProximityInEvent(event : *mut gdk::EventProximity) -> bool)
signal!(proximity_out_event, ProximityOutEvent(event : *mut gdk::EventProximity) -> bool)
signal!(scroll_event, ScrollEvent(event : *mut gdk::EventScroll) -> bool)
signal!(touch_event, TouchEvent(event : *mut gdk::EventTouch) -> bool)
signal!(unmap_event, UnmapEvent(event : *mut gdk::EventAny) -> bool)
signal!(window_state_event, WindowStateEvent(event : *mut gdk::EventWindowState) -> bool)
//GtkWidget: Drag-drop
/*
signal!(drag_begin, DragBegin(context : *mut gdk::DragContext) -> ())
signal!(drag_data_delete, DragDataDelete(context : *mut gdk::DragContext) -> ())
signal!(drag_data_get, DragDataGet(context : *mut gdk::DragContext,
data : *gtk::SelectionData,
info: uint,
time: uint) -> ())
signal!(drag_data_received, DragDataReceived(context : *mut gdk::DragContext,
x: int,
y: int,
data: *gtk::SelectionData,
info: uint,
time: uint) -> ())
signal!(drag_drop, DragDrop(context : *mut gdk::DragContext,
x: int,
y: int,
time: uint) -> ())
signal!(drag_end, DragEnd(context : *mut gdk::DragContext) -> ())
signal!(drag_failed, DragFailed(context : *mut gdk::DragContext, result: gtk::DragResult) -> ())
signal!(drag_leave, DragLeave(context : *mut gdk::DragContext, time : uint) -> ())
signal!(drag_motion, DragMotion(context : *mut gdk::DragContext,
x : int,
y : int,<|fim▁hole|>
//GtkWidget: Selection
signal!(selection_get, SelectionGet(data : *gtk::SelectionData, info: uint, time: uint) -> ())
signal!(selection_received, SelectionReceived(data : *gtk::SelectionData, time: uint) -> ())
signal!(selection_clear_event, SelectionClearEvent(event : *mut gdk::EventSelection) -> bool)
signal!(selection_request_event,SelectionRequestEvent(event : *mut gdk::EventSelection) -> bool)
signal!(selection_notify_event, SelectionNotifyEvent(event : *mut gdk::EventSelection) -> bool)
*/
//GtkContainer
//signal!(add, Add(widget: *gtk::Widget) -> ())
signal!(check_resize, CheckResize() -> ())
//signal!(remove, Remove(widget: *gtk::Widget) -> ())
//signal!(set_focus_child, SetFocusChild(widget: *gtk::Widget) -> ())
//GtkButton
signal!(activate, Activate() -> ())
signal!(clicked, Clicked() -> ())
signal!(enter, Enter() -> ())
signal!(leave, Leave() -> ())
signal!(pressed, Pressed() -> ())
signal!(released, Released() -> ())
//GtkDialog
signal!(response, Response(response_id : int) -> ())
//GtkAdjustment
//https://developer.gnome.org/gtk3/stable/GtkAdjustment.html#GtkAdjustment.signals
signal!(value_changed, ValueChanged() -> ())
// SpinButton
signal!(changed_value, ChangedValue() -> ())
signal!(wrapped, Wrapped() -> ())
//gint input Run Last
//gboolean output Run Last
// Range
signal!(adjust_bounds, AdjustBounds() -> ())
signal!(move_slider, MoveSlider() -> ())<|fim▁end|> | time : uint) -> bool) |
<|file_name|>jim_chat.py<|end_file_name|><|fim▁begin|>import json_creator
class Message:
def __init__(self, sent_to, sent_from, text):
self.sent_from = sent_from
self.text = text
self.sent_to = sent_to
def parse_message(self, message_dict):
self.sent_from = json_creator.get_message_sendfrom(message_dict)
self.sent_to = json_creator.get_message_sendto(message_dict)
self.text = json_creator.get_message_text(message_dict)
def get_message(self):
return json_creator.get_message(self.sent_to, self.sent_from,
self.text)
class Chat:<|fim▁hole|> self.messages = []
def add_client(self, user):
self.clients.append(user)
def remove_client(self, user):
self.clients.remove(user)
class Client:
def __init__(self):
self.chat = []
self.username = ''
def set_username(self, username):
self.username = username
def send_message(self, sendto, message):
new_message = Message(sendto, self.username, message)<|fim▁end|> | def __init__(self):
self.clients = [] |
<|file_name|>Gridbox.tsx<|end_file_name|><|fim▁begin|>import {
Box,
Button,
Card,
Checkbox,
Chip,
styled,
Typography,
} from "@material-ui/core";
import { fetchResolver } from "@penrose/components";
import {
compileTrio,
evalEnergy,
PenroseState,
prepareState,
prettySubstance,
RenderStatic,
resample,
showError,
showMutations,
stepUntilConvergence,
SynthesizedSubstance,
} from "@penrose/core";
import React from "react";
export interface GridboxProps {
domain: string;
style: string;
substance: SynthesizedSubstance;
variation: string;
progNumber: number;
srcState: PenroseState | undefined;
updateSrcProg: (newState: PenroseState) => void;
onStaged: (n: number, s: string) => void;
}
const Section = styled(Card)(({ theme }) => ({
margin: "0.5rem",
width: "25rem",
height: "25rem",
borderColor: theme.palette.primary.main,
borderWidth: "2px",
borderStyle: "outset",
color: theme.palette.primary.main,
borderRadius: "5px",
}));
const LowEnergy = styled(Chip)(({ theme }) => ({
background: theme.palette.success.main,
color: "white",
}));
const HighEnergy = styled(Chip)(({ theme }) => ({
background: theme.palette.error.light,
color: "white",
}));
const Header = styled(Box)(({ theme }) => ({
color: theme.palette.primary.main,
width: "calc(100% - .75rem)",
height: "1.75rem",
borderBottom: "1px solid black",
fontSize: "1.25rem",
display: "flex",
flexDirection: "row",
justifyContent: "space-between",
padding: "0.5rem 0 0.5rem 0.75rem",
verticalAlign: "text-bottom",
}));
const Body = styled(Box)({
fontFamily: "Roboto Mono, Courier New, sans-serif",
height: "calc(25rem - 4.25rem)",
fontSize: "0.8rem",
color: "black",
overflow: "auto",
whiteSpace: "pre-wrap",
padding: "0.5rem 0.25rem 0.25rem 0.5rem",
});
const H2 = styled(Box)({
borderBottom: "1px solid black",
padding: "0.5rem 0 0.35rem 0",
marginBottom: ".5rem",
fontFamily: "sans-serif",
color: "gray",
});
const HeaderText = styled(Typography)(({ theme }) => ({<|fim▁hole|>}));
const ExportCheckbox = styled(Checkbox)({
padding: "0 0.5rem",
});
const ResampleBtn = styled(Button)({
fontSize: "0.8rem",
padding: "0 0.5rem",
});
interface GridboxState {
showDiagram: boolean;
isSelected: boolean;
diagramSVG: string;
energy: number;
}
export class Gridbox extends React.Component<GridboxProps, GridboxState> {
constructor(props: GridboxProps) {
super(props);
this.state = {
showDiagram: true,
isSelected: false,
diagramSVG: "",
energy: 0,
};
}
computeEnergy = async (optimizedState: PenroseState) => {
if (this.props.srcState) {
const crossState = {
...optimizedState,
constrFns: this.props.srcState.constrFns,
objFns: this.props.srcState.objFns,
};
try {
// resample because initial sampling did not use the special sampling seed
const energy = evalEnergy(await prepareState(crossState));
this.setState({
energy: Math.round(energy),
});
} catch (e) {
console.log("error with CIEE: ", e);
this.setState({
energy: -1,
});
}
}
};
// TODO: this should really be put in a web worker, it blocks browser interaction
async update() {
const res = compileTrio({
substance: prettySubstance(this.props.substance.prog),
style: this.props.style,
domain: this.props.domain,
variation: this.props.variation,
});
if (res.isOk()) {
try {
// https://stackoverflow.com/a/19626821
// setTimeout causes this function to be pushed to bottom of call stack. Since Gridbox
// component is rendered in an array, we want to delay ALL componentDidMount calls until
// after ALL gridboxes have been initially rendered.
await new Promise((r) => setTimeout(r, 1));
// resample because initial sampling did not use the special sampling seed
let state = resample(await prepareState(res.value));
state = resample(state);
const opt = stepUntilConvergence(state);
if (opt.isErr()) {
console.log(showError(opt.error));
throw Error("optimization failed");
}
const optimized = opt.value;
const rendered = await RenderStatic(optimized, fetchResolver);
this.setState({ diagramSVG: rendered.outerHTML });
if (this.props.progNumber === 0) {
// original program is cached by parent component to be used for CIEE with mutated progs
this.props.updateSrcProg(optimized);
} else {
this.computeEnergy(optimized);
}
} catch (e) {
console.log(e);
}
} else {
throw res.error;
}
}
async componentDidMount() {
await this.update();
}
async componentDidUpdate(prevProps: GridboxProps) {
if (this.props.substance.prog !== prevProps.substance.prog) {
await this.update();
}
}
toggleView = () => {
this.setState({ showDiagram: !this.state.showDiagram });
};
checkboxClick = () => {
this.setState({ isSelected: !this.state.isSelected });
this.props.onStaged(this.props.progNumber, this.state.diagramSVG);
};
resample = () => {
this.update();
};
// NOTE: not rendered by default, uncomment in render function to see
energyChip = () => {
return this.state.energy > 10000 || this.state.energy < 0 ? (
<HighEnergy
label={`energy: ${
this.state.energy < 0 ? "Inf" : this.state.energy.toExponential(2)
}`}
size="small"
/>
) : (
<LowEnergy label={`energy: ${this.state.energy}`} size="small" />
);
};
render() {
const stmts = prettySubstance(this.props.substance.prog);
return (
<Section>
<Header>
<HeaderText>
{this.props.progNumber === 0
? "Original Diagram"
: `Mutated Program #${this.props.progNumber}`}
</HeaderText>
<Box>
<ResampleBtn
onClick={this.resample}
variant="contained"
color="primary"
>
Resample
</ResampleBtn>
{/* {this.energyChip()} */}
<ExportCheckbox
name="isStaged"
checked={this.state.isSelected}
onChange={this.checkboxClick}
color="primary"
/>
</Box>
</Header>
<div onClick={this.toggleView}>
{this.state.showDiagram ? (
<div
style={{ width: "100%", height: "100%" }}
dangerouslySetInnerHTML={{
__html: this.state.diagramSVG,
}}
/>
) : (
<Body>
<H2>Mutations</H2>
{this.props.progNumber === 0
? "N/A"
: showMutations(this.props.substance.ops)}
<H2>Substance Program</H2>
{`${stmts}`}
</Body>
)}
</div>
</Section>
);
}
}<|fim▁end|> | color: theme.palette.primary.main,
fontFamily: "Roboto Mono, Helvetica, sans-serif",
verticalAlign: "text-bottom", |
<|file_name|>doc.go<|end_file_name|><|fim▁begin|>/*
Copyright 2021 The KubeVirt Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
<|fim▁hole|>
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
// This package has the automatically generated typed clients.
package v1beta1<|fim▁end|> | http://www.apache.org/licenses/LICENSE-2.0 |
<|file_name|>base64mime.py<|end_file_name|><|fim▁begin|># Copyright (C) 2002-2007 Python Software Foundation
# Author: Ben Gertzfield
# Contact: [email protected]
"""Base64 content transfer encoding per RFCs 2045-2047.
This module handles the content transfer encoding method defined in RFC 2045
to encode arbitrary 8-bit data using the three 8-bit bytes in four 7-bit
characters encoding known as Base64.
It is used in the MIME standards for email to attach images, audio, and text
using some 8-bit character sets to messages.
This module provides an interface to encode and decode both headers and bodies
with Base64 encoding.
RFC 2045 defines a method for including character set information in an
`encoded-word' in a header. This method is commonly used for 8-bit real names
in To:, From:, Cc:, etc. fields, as well as Subject: lines.
This module does not do the line wrapping or end-of-line character conversion
necessary for proper internationalized headers; it only does dumb encoding and
decoding. To deal with the various line wrapping issues, use the email.header
module.
"""
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future.builtins import range
from future.builtins import bytes
__all__ = [
'body_decode',
'body_encode',
'decode',
'decodestring',
'header_encode',
'header_length',
]
from base64 import b64encode
from binascii import b2a_base64, a2b_base64
CRLF = '\r\n'
NL = '\n'
EMPTYSTRING = ''
# See also Charset.py
MISC_LEN = 7
# Helpers
def header_length(bytearray):
"""Return the length of s when it is encoded with base64."""
groups_of_3, leftover = divmod(len(bytearray), 3)
# 4 bytes out for each 3 bytes (or nonzero fraction thereof) in.
n = groups_of_3 * 4
if leftover:
n += 4<|fim▁hole|>
def header_encode(header_bytes, charset='iso-8859-1'):
"""Encode a single header line with Base64 encoding in a given charset.
charset names the character set to use to encode the header. It defaults
to iso-8859-1. Base64 encoding is defined in RFC 2045.
"""
if not header_bytes:
return ""
if isinstance(header_bytes, str):
header_bytes = header_bytes.encode(charset)
encoded = b64encode(header_bytes).decode("ascii")
return '=?%s?b?%s?=' % (charset, encoded)
def body_encode(s, maxlinelen=76, eol=NL):
r"""Encode a string with base64.
Each line will be wrapped at, at most, maxlinelen characters (defaults to
76 characters).
Each line of encoded text will end with eol, which defaults to "\n". Set
this to "\r\n" if you will be using the result of this function directly
in an email.
"""
if not s:
return s
encvec = []
max_unencoded = maxlinelen * 3 // 4
for i in range(0, len(s), max_unencoded):
# BAW: should encode() inherit b2a_base64()'s dubious behavior in
# adding a newline to the encoded string?
enc = b2a_base64(s[i:i + max_unencoded]).decode("ascii")
if enc.endswith(NL) and eol != NL:
enc = enc[:-1] + eol
encvec.append(enc)
return EMPTYSTRING.join(encvec)
def decode(string):
"""Decode a raw base64 string, returning a bytes object.
This function does not parse a full MIME header value encoded with
base64 (like =?iso-8895-1?b?bmloISBuaWgh?=) -- please use the high
level email.header class for that functionality.
"""
if not string:
return bytes()
elif isinstance(string, str):
return a2b_base64(string.encode('raw-unicode-escape'))
else:
return a2b_base64(string)
# For convenience and backwards compatibility w/ standard base64 module
body_decode = decode
decodestring = decode<|fim▁end|> | return n |
<|file_name|>process_builder.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::ffi::CString;
use std::fmt::{self, Formatter};
use std::io::process::{Command, ProcessOutput, InheritFd};
use std::os;
use std::path::BytesContainer;
use util::{CargoResult, ProcessError, process_error};
#[derive(Clone, PartialEq, Show)]
pub struct ProcessBuilder {
program: CString,
args: Vec<CString>,
env: HashMap<String, Option<CString>>,
cwd: Path,
}
impl fmt::String for ProcessBuilder {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
try!(write!(f, "`{}", String::from_utf8_lossy(self.program.as_bytes())));
for arg in self.args.iter() {
try!(write!(f, " {}", String::from_utf8_lossy(arg.as_bytes())));
}
write!(f, "`")
}
}
impl ProcessBuilder {
pub fn arg<T: BytesContainer>(mut self, arg: T) -> ProcessBuilder {
self.args.push(CString::from_slice(arg.container_as_bytes()));
self
}
pub fn args<T: BytesContainer>(mut self, arguments: &[T]) -> ProcessBuilder {
self.args.extend(arguments.iter().map(|t| {
CString::from_slice(t.container_as_bytes())
}));
self
}
pub fn get_args(&self) -> &[CString] {
self.args.as_slice()
}
pub fn cwd(mut self, path: Path) -> ProcessBuilder {
self.cwd = path;
self
}
pub fn env<T: BytesContainer>(mut self, key: &str,
val: Option<T>) -> ProcessBuilder {
let val = val.map(|t| CString::from_slice(t.container_as_bytes()));
self.env.insert(key.to_string(), val);
self
}
// TODO: should InheritFd be hardcoded?
pub fn exec(&self) -> Result<(), ProcessError> {
let mut command = self.build_command();
command.stdout(InheritFd(1))
.stderr(InheritFd(2))
.stdin(InheritFd(0));
let exit = try!(command.status().map_err(|e| {
process_error(format!("Could not execute process `{}`",
self.debug_string()),
Some(e), None, None)
}));
if exit.success() {
Ok(())
} else {
Err(process_error(format!("Process didn't exit successfully: `{}`",
self.debug_string()),
None, Some(&exit), None))
}
}
pub fn exec_with_output(&self) -> Result<ProcessOutput, ProcessError> {
let command = self.build_command();
let output = try!(command.output().map_err(|e| {
process_error(format!("Could not execute process `{}`",
self.debug_string()),
Some(e), None, None)<|fim▁hole|> } else {
Err(process_error(format!("Process didn't exit successfully: `{}`",
self.debug_string()),
None, Some(&output.status), Some(&output)))
}
}
pub fn build_command(&self) -> Command {
let mut command = Command::new(&self.program);
command.cwd(&self.cwd);
for arg in self.args.iter() {
command.arg(arg);
}
for (k, v) in self.env.iter() {
let k = k.as_slice();
match *v {
Some(ref v) => { command.env(k, v); }
None => { command.env_remove(k); }
}
}
command
}
fn debug_string(&self) -> String {
let mut program = format!("{}", String::from_utf8_lossy(self.program.as_bytes()));
for arg in self.args.iter() {
program.push(' ');
program.push_str(&format!("{}", String::from_utf8_lossy(arg.as_bytes()))[]);
}
program
}
}
pub fn process<T: BytesContainer>(cmd: T) -> CargoResult<ProcessBuilder> {
Ok(ProcessBuilder {
program: CString::from_slice(cmd.container_as_bytes()),
args: Vec::new(),
cwd: try!(os::getcwd()),
env: HashMap::new(),
})
}<|fim▁end|> | }));
if output.status.success() {
Ok(output) |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 Nicholas Bishop
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.<|fim▁hole|>pub mod angle;
pub mod intersect;
pub mod line;
pub mod quad;
pub mod prelude;
pub mod range;
pub mod segment;
pub mod sphere;
pub mod util;
pub mod vec2f;
pub mod vec3f;<|fim▁end|> |
pub mod aabb; |
<|file_name|>pixel_buffer.rs<|end_file_name|><|fim▁begin|>/*!
Pixel buffers are buffers that contain two-dimensional texture data.
Contrary to textures, pixel buffers are stored in a client-defined format. They are used
to transfer data to or from the video memory, before or after being turned into a texture.
*/
use std::borrow::Cow;
use std::cell::Cell;
use std::ops::{Deref, DerefMut};
use backend::Facade;
use GlObject;
use BufferViewExt;
use buffer::{ReadError, BufferView, BufferType};
use gl;
use texture::PixelValue;
use texture::Texture2dDataSink;
/// Buffer that stores the content of a texture.
///
/// The generic type represents the type of pixels that the buffer contains.
pub struct PixelBuffer<T> where T: PixelValue {
buffer: BufferView<[T]>,
dimensions: Cell<Option<(u32, u32)>>,
}
impl<T> PixelBuffer<T> where T: PixelValue {
/// Builds a new buffer with an uninitialized content.
pub fn new_empty<F>(facade: &F, capacity: usize) -> PixelBuffer<T> where F: Facade {
PixelBuffer {
buffer: BufferView::empty_array(facade, BufferType::PixelPackBuffer, capacity,
false).unwrap(),
dimensions: Cell::new(None),
}
}
/// Reads the content of the pixel buffer.
pub fn read_as_texture_2d<S>(&self) -> Result<S, ReadError> where S: Texture2dDataSink<T> {
let dimensions = self.dimensions.get().expect("The pixel buffer is empty");
let data = try!(self.read());
Ok(S::from_raw(Cow::Owned(data), dimensions.0, dimensions.1))
}
}
impl<T> Deref for PixelBuffer<T> where T: PixelValue {
type Target = BufferView<[T]>;
fn deref(&self) -> &BufferView<[T]> {
&self.buffer
}
}
impl<T> DerefMut for PixelBuffer<T> where T: PixelValue {
fn deref_mut(&mut self) -> &mut BufferView<[T]> {
&mut self.buffer
}
}
// TODO: rework this
impl<T> GlObject for PixelBuffer<T> where T: PixelValue {
type Id = gl::types::GLuint;
fn get_id(&self) -> gl::types::GLuint {
self.buffer.get_buffer_id()
}<|fim▁hole|>}
// TODO: remove this hack
#[doc(hidden)]
pub fn store_infos<T>(b: &PixelBuffer<T>, dimensions: (u32, u32)) where T: PixelValue {
b.dimensions.set(Some(dimensions));
}<|fim▁end|> | |
<|file_name|>timer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
Created by Samvel Khalatyan, May 01, 2012
Copyright 2012, All rights reserved
'''
from __future__ import division
import time
class Timer(object):
def __init__(self):
self._calls = 0
self._elapsed = 0
self._start = None
def start(self):
if not self._start:
self._start = time.clock()
def stop(self):
if self._start:
self._elapsed += time.clock() - self._start
self._calls += 1
self._start = None
def calls(self):
return self._calls
def elapsed(self):
return self._elapsed
def __str__(self):
return "avg: {avg:.3f} mkS calls: {calls} total: {total:.6f} S".format(<|fim▁hole|> avg=self.elapsed() / self.calls() * 1e6 if self.calls() else 0,
calls=self.calls(),
total=self.elapsed())<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use abi;
use ast::{Ident, Generics, Expr};
use ast;
use ast_util;
use attr;
use codemap::{Span, respan, Spanned, DUMMY_SP, Pos};
use ext::base::ExtCtxt;
use owned_slice::OwnedSlice;
use parse::token::special_idents;
use parse::token::InternedString;
use parse::token;
use ptr::P;
// Transitional reexports so qquote can find the paths it is looking for
mod syntax {<|fim▁hole|>}
pub trait AstBuilder {
// paths
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path;
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path;
fn path_all(&self, sp: Span,
global: bool,
idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path;
// types
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy;
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty>;
fn ty_path(&self, ast::Path, Option<OwnedSlice<ast::TyParamBound>>) -> P<ast::Ty>;
fn ty_ident(&self, span: Span, idents: ast::Ident) -> P<ast::Ty>;
fn ty_rptr(&self, span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability) -> P<ast::Ty>;
fn ty_ptr(&self, span: Span,
ty: P<ast::Ty>,
mutbl: ast::Mutability) -> P<ast::Ty>;
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty>;
fn ty_infer(&self, sp: Span) -> P<ast::Ty>;
fn ty_nil(&self) -> P<ast::Ty>;
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn typaram(&self,
span: Span,
id: ast::Ident,
bounds: OwnedSlice<ast::TyParamBound>,
unbound: Option<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam;
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef;
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound;
fn lifetime(&self, span: Span, ident: ast::Name) -> ast::Lifetime;
fn lifetime_def(&self,
span: Span,
name: ast::Name,
bounds: Vec<ast::Lifetime>)
-> ast::LifetimeDef;
// statements
fn stmt_expr(&self, expr: P<ast::Expr>) -> P<ast::Stmt>;
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P<ast::Expr>) -> P<ast::Stmt>;
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: P<ast::Expr>)
-> P<ast::Stmt>;
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> P<ast::Stmt>;
// blocks
fn block(&self, span: Span, stmts: Vec<P<ast::Stmt>>,
expr: Option<P<ast::Expr>>) -> P<ast::Block>;
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block>;
fn block_all(&self, span: Span,
view_items: Vec<ast::ViewItem>,
stmts: Vec<P<ast::Stmt>>,
expr: Option<P<ast::Expr>>) -> P<ast::Block>;
// expressions
fn expr(&self, span: Span, node: ast::Expr_) -> P<ast::Expr>;
fn expr_path(&self, path: ast::Path) -> P<ast::Expr>;
fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr>;
fn expr_self(&self, span: Span) -> P<ast::Expr>;
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr>;
fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_managed(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr>;
fn expr_field_access(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>,
idx: uint) -> P<ast::Expr>;
fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident>,
args: Vec<P<ast::Expr>> ) -> P<ast::Expr>;
fn expr_method_call(&self, span: Span,
expr: P<ast::Expr>, ident: ast::Ident,
args: Vec<P<ast::Expr>> ) -> P<ast::Expr>;
fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr>;
fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr>;
fn field_imm(&self, span: Span, name: Ident, e: P<ast::Expr>) -> ast::Field;
fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field>) -> P<ast::Expr>;
fn expr_struct_ident(&self, span: Span, id: ast::Ident,
fields: Vec<ast::Field>) -> P<ast::Expr>;
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr>;
fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr>;
fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr>;
fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr>;
fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr>;
fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr>;
fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr>;
fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
fn expr_none(&self, sp: Span) -> P<ast::Expr>;
fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr>;
fn expr_unreachable(&self, span: Span) -> P<ast::Expr>;
fn expr_ok(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
fn expr_err(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
fn expr_try(&self, span: Span, head: P<ast::Expr>) -> P<ast::Expr>;
fn pat(&self, span: Span, pat: ast::Pat_) -> P<ast::Pat>;
fn pat_wild(&self, span: Span) -> P<ast::Pat>;
fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat>;
fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat>;
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> P<ast::Pat>;
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>> ) -> P<ast::Pat>;
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat> ) -> P<ast::Pat>;
fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat>;
fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>;
fn pat_none(&self, span: Span) -> P<ast::Pat>;
fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>;
fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat>;
fn arm(&self, span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm;
fn arm_unreachable(&self, span: Span) -> ast::Arm;
fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm> ) -> P<ast::Expr>;
fn expr_if(&self, span: Span,
cond: P<ast::Expr>, then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr>;
fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr>;
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> P<ast::Expr>;
fn lambda(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Block>) -> P<ast::Expr>;
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> P<ast::Expr>;
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> P<ast::Expr>;
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident> , blk: P<ast::Expr>) -> P<ast::Expr>;
fn lambda_expr_0(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr>;
fn lambda_expr_1(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr>;
fn lambda_stmts(&self, span: Span, ids: Vec<ast::Ident>,
blk: Vec<P<ast::Stmt>>) -> P<ast::Expr>;
fn lambda_stmts_0(&self, span: Span, stmts: Vec<P<ast::Stmt>>) -> P<ast::Expr>;
fn lambda_stmts_1(&self, span: Span, stmts: Vec<P<ast::Stmt>>,
ident: ast::Ident) -> P<ast::Expr>;
// items
fn item(&self, span: Span,
name: Ident, attrs: Vec<ast::Attribute> , node: ast::Item_) -> P<ast::Item>;
fn arg(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::Arg;
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl>;
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> P<ast::Item>;
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>) -> P<ast::Item>;
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant;
fn item_enum_poly(&self,
span: Span,
name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> P<ast::Item>;
fn item_enum(&self, span: Span, name: Ident, enum_def: ast::EnumDef) -> P<ast::Item>;
fn item_struct_poly(&self,
span: Span,
name: Ident,
struct_def: ast::StructDef,
generics: Generics) -> P<ast::Item>;
fn item_struct(&self, span: Span, name: Ident, struct_def: ast::StructDef) -> P<ast::Item>;
fn item_mod(&self, span: Span, inner_span: Span,
name: Ident, attrs: Vec<ast::Attribute>,
vi: Vec<ast::ViewItem> , items: Vec<P<ast::Item>> ) -> P<ast::Item>;
fn item_static(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
mutbl: ast::Mutability,
expr: P<ast::Expr>)
-> P<ast::Item>;
fn item_ty_poly(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
generics: Generics) -> P<ast::Item>;
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item>;
fn attribute(&self, sp: Span, mi: P<ast::MetaItem>) -> ast::Attribute;
fn meta_word(&self, sp: Span, w: InternedString) -> P<ast::MetaItem>;
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<P<ast::MetaItem>> )
-> P<ast::MetaItem>;
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> P<ast::MetaItem>;
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: P<ast::ViewPath>) -> ast::ViewItem;
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem;
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem;
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem;
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem;
}
impl<'a> AstBuilder for ExtCtxt<'a> {
fn path(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, false, strs, Vec::new(), Vec::new())
}
fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path {
self.path(span, vec!(id))
}
fn path_global(&self, span: Span, strs: Vec<ast::Ident> ) -> ast::Path {
self.path_all(span, true, strs, Vec::new(), Vec::new())
}
fn path_all(&self,
sp: Span,
global: bool,
mut idents: Vec<ast::Ident> ,
lifetimes: Vec<ast::Lifetime>,
types: Vec<P<ast::Ty>> )
-> ast::Path {
let last_identifier = idents.pop().unwrap();
let mut segments: Vec<ast::PathSegment> = idents.into_iter()
.map(|ident| {
ast::PathSegment {
identifier: ident,
lifetimes: Vec::new(),
types: OwnedSlice::empty(),
}
}).collect();
segments.push(ast::PathSegment {
identifier: last_identifier,
lifetimes: lifetimes,
types: OwnedSlice::from_vec(types),
});
ast::Path {
span: sp,
global: global,
segments: segments,
}
}
fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
ast::MutTy {
ty: ty,
mutbl: mutbl
}
}
fn ty(&self, span: Span, ty: ast::Ty_) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
span: span,
node: ty
})
}
fn ty_path(&self, path: ast::Path, bounds: Option<OwnedSlice<ast::TyParamBound>>)
-> P<ast::Ty> {
self.ty(path.span,
ast::TyPath(path, bounds, ast::DUMMY_NODE_ID))
}
// Might need to take bounds as an argument in the future, if you ever want
// to generate a bounded existential trait type.
fn ty_ident(&self, span: Span, ident: ast::Ident)
-> P<ast::Ty> {
self.ty_path(self.path_ident(span, ident), None)
}
fn ty_rptr(&self,
span: Span,
ty: P<ast::Ty>,
lifetime: Option<ast::Lifetime>,
mutbl: ast::Mutability)
-> P<ast::Ty> {
self.ty(span,
ast::TyRptr(lifetime, self.ty_mt(ty, mutbl)))
}
fn ty_ptr(&self,
span: Span,
ty: P<ast::Ty>,
mutbl: ast::Mutability)
-> P<ast::Ty> {
self.ty(span,
ast::TyPtr(self.ty_mt(ty, mutbl)))
}
fn ty_uniq(&self, span: Span, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty(span, ast::TyUniq(ty))
}
fn ty_option(&self, ty: P<ast::Ty>) -> P<ast::Ty> {
self.ty_path(
self.path_all(DUMMY_SP,
true,
vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Option")
),
Vec::new(),
vec!( ty )), None)
}
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer)
}
fn ty_nil(&self) -> P<ast::Ty> {
P(ast::Ty {
id: ast::DUMMY_NODE_ID,
node: ast::TyNil,
span: DUMMY_SP,
})
}
fn typaram(&self,
span: Span,
id: ast::Ident,
bounds: OwnedSlice<ast::TyParamBound>,
unbound: Option<ast::TyParamBound>,
default: Option<P<ast::Ty>>) -> ast::TyParam {
ast::TyParam {
ident: id,
id: ast::DUMMY_NODE_ID,
bounds: bounds,
unbound: unbound,
default: default,
span: span
}
}
// these are strange, and probably shouldn't be used outside of
// pipes. Specifically, the global version possible generates
// incorrect code.
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_ident(DUMMY_SP, p.ident)).collect()
}
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> {
ty_params.iter().map(|p| self.ty_path(
self.path_global(DUMMY_SP, vec!(p.ident)), None)).collect()
}
fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
ast::TraitRef {
path: path,
ref_id: ast::DUMMY_NODE_ID,
lifetimes: Vec::new(),
}
}
fn typarambound(&self, path: ast::Path) -> ast::TyParamBound {
ast::TraitTyParamBound(self.trait_ref(path))
}
fn lifetime(&self, span: Span, name: ast::Name) -> ast::Lifetime {
ast::Lifetime { id: ast::DUMMY_NODE_ID, span: span, name: name }
}
fn lifetime_def(&self,
span: Span,
name: ast::Name,
bounds: Vec<ast::Lifetime>)
-> ast::LifetimeDef {
ast::LifetimeDef {
lifetime: self.lifetime(span, name),
bounds: bounds
}
}
fn stmt_expr(&self, expr: P<ast::Expr>) -> P<ast::Stmt> {
P(respan(expr.span, ast::StmtSemi(expr, ast::DUMMY_NODE_ID)))
}
fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident,
ex: P<ast::Expr>) -> P<ast::Stmt> {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = P(ast::Local {
ty: self.ty_infer(sp),
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
source: ast::LocalLet,
});
let decl = respan(sp, ast::DeclLocal(local));
P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID)))
}
fn stmt_let_typed(&self,
sp: Span,
mutbl: bool,
ident: ast::Ident,
typ: P<ast::Ty>,
ex: P<ast::Expr>)
-> P<ast::Stmt> {
let pat = if mutbl {
self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable))
} else {
self.pat_ident(sp, ident)
};
let local = P(ast::Local {
ty: typ,
pat: pat,
init: Some(ex),
id: ast::DUMMY_NODE_ID,
span: sp,
source: ast::LocalLet,
});
let decl = respan(sp, ast::DeclLocal(local));
P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID)))
}
fn block(&self, span: Span, stmts: Vec<P<ast::Stmt>>,
expr: Option<P<Expr>>) -> P<ast::Block> {
self.block_all(span, Vec::new(), stmts, expr)
}
fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> P<ast::Stmt> {
let decl = respan(sp, ast::DeclItem(item));
P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID)))
}
fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr))
}
fn block_all(&self,
span: Span,
view_items: Vec<ast::ViewItem>,
stmts: Vec<P<ast::Stmt>>,
expr: Option<P<ast::Expr>>) -> P<ast::Block> {
P(ast::Block {
view_items: view_items,
stmts: stmts,
expr: expr,
id: ast::DUMMY_NODE_ID,
rules: ast::DefaultBlock,
span: span,
})
}
fn expr(&self, span: Span, node: ast::Expr_) -> P<ast::Expr> {
P(ast::Expr {
id: ast::DUMMY_NODE_ID,
node: node,
span: span,
})
}
fn expr_path(&self, path: ast::Path) -> P<ast::Expr> {
self.expr(path.span, ast::ExprPath(path))
}
fn expr_ident(&self, span: Span, id: ast::Ident) -> P<ast::Expr> {
self.expr_path(self.path_ident(span, id))
}
fn expr_self(&self, span: Span) -> P<ast::Expr> {
self.expr_ident(span, special_idents::self_)
}
fn expr_binary(&self, sp: Span, op: ast::BinOp,
lhs: P<ast::Expr>, rhs: P<ast::Expr>) -> P<ast::Expr> {
self.expr(sp, ast::ExprBinary(op, lhs, rhs))
}
fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr_unary(sp, ast::UnDeref, e)
}
fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr(sp, ast::ExprUnary(op, e))
}
fn expr_managed(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr_unary(sp, ast::UnBox, e)
}
fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
let field_name = token::get_ident(ident);
let field_span = Span {
lo: sp.lo - Pos::from_uint(field_name.get().len()),
hi: sp.hi,
expn_id: sp.expn_id,
};
let id = Spanned { node: ident, span: field_span };
self.expr(sp, ast::ExprField(expr, id, Vec::new()))
}
fn expr_tup_field_access(&self, sp: Span, expr: P<ast::Expr>, idx: uint) -> P<ast::Expr> {
let field_span = Span {
lo: sp.lo - Pos::from_uint(idx.to_string().len()),
hi: sp.hi,
expn_id: sp.expn_id,
};
let id = Spanned { node: idx, span: field_span };
self.expr(sp, ast::ExprTupField(expr, id, Vec::new()))
}
fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e))
}
fn expr_mut_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e))
}
fn expr_call(&self, span: Span, expr: P<ast::Expr>, args: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(span, ast::ExprCall(expr, args))
}
fn expr_call_ident(&self, span: Span, id: ast::Ident,
args: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(span, ast::ExprCall(self.expr_ident(span, id), args))
}
fn expr_call_global(&self, sp: Span, fn_path: Vec<ast::Ident> ,
args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
let pathexpr = self.expr_path(self.path_global(sp, fn_path));
self.expr_call(sp, pathexpr, args)
}
fn expr_method_call(&self, span: Span,
expr: P<ast::Expr>,
ident: ast::Ident,
mut args: Vec<P<ast::Expr>> ) -> P<ast::Expr> {
let id = Spanned { node: ident, span: span };
args.unshift(expr);
self.expr(span, ast::ExprMethodCall(id, Vec::new(), args))
}
fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
self.expr(b.span, ast::ExprBlock(b))
}
fn field_imm(&self, span: Span, name: Ident, e: P<ast::Expr>) -> ast::Field {
ast::Field { ident: respan(span, name), expr: e, span: span }
}
fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec<ast::Field>) -> P<ast::Expr> {
self.expr(span, ast::ExprStruct(path, fields, None))
}
fn expr_struct_ident(&self, span: Span,
id: ast::Ident, fields: Vec<ast::Field>) -> P<ast::Expr> {
self.expr_struct(span, self.path_ident(span, id), fields)
}
fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P<ast::Expr> {
self.expr(sp, ast::ExprLit(P(respan(sp, lit))))
}
fn expr_uint(&self, span: Span, i: uint) -> P<ast::Expr> {
self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyU)))
}
fn expr_int(&self, sp: Span, i: int) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyI, ast::Sign::new(i))))
}
fn expr_u8(&self, sp: Span, u: u8) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitInt(u as u64, ast::UnsignedIntLit(ast::TyU8)))
}
fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitBool(value))
}
fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(sp, ast::ExprVec(exprs))
}
fn expr_vec_ng(&self, sp: Span) -> P<ast::Expr> {
self.expr_call_global(sp,
vec!(self.ident_of("std"),
self.ident_of("vec"),
self.ident_of("Vec"),
self.ident_of("new")),
Vec::new())
}
fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr_addr_of(sp, self.expr_vec(sp, exprs))
}
fn expr_str(&self, sp: Span, s: InternedString) -> P<ast::Expr> {
self.expr_lit(sp, ast::LitStr(s, ast::CookedStr))
}
fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
self.expr(sp, ast::ExprCast(expr, ty))
}
fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Some"));
self.expr_call_global(sp, some, vec!(expr))
}
fn expr_none(&self, sp: Span) -> P<ast::Expr> {
let none = self.path_global(sp, vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("None")));
self.expr_path(none)
}
fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
self.expr(sp, ast::ExprTup(exprs))
}
fn expr_fail(&self, span: Span, msg: InternedString) -> P<ast::Expr> {
let loc = self.codemap().lookup_char_pos(span.lo);
let expr_file = self.expr_str(span,
token::intern_and_get_ident(loc.file
.name
.as_slice()));
let expr_line = self.expr_uint(span, loc.line);
let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line));
let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple);
self.expr_call_global(
span,
vec!(
self.ident_of("std"),
self.ident_of("rt"),
self.ident_of("begin_unwind")),
vec!(
self.expr_str(span, msg),
expr_file_line_ptr))
}
fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
self.expr_fail(span,
InternedString::new(
"internal error: entered unreachable code"))
}
fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
let ok = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Ok"));
self.expr_call_global(sp, ok, vec!(expr))
}
fn expr_err(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
let err = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Err"));
self.expr_call_global(sp, err, vec!(expr))
}
fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> {
let ok = self.ident_of("Ok");
let ok_path = self.path_ident(sp, ok);
let err = self.ident_of("Err");
let err_path = self.path_ident(sp, err);
let binding_variable = self.ident_of("__try_var");
let binding_pat = self.pat_ident(sp, binding_variable);
let binding_expr = self.expr_ident(sp, binding_variable);
// Ok(__try_var) pattern
let ok_pat = self.pat_enum(sp, ok_path, vec!(binding_pat.clone()));
// Err(__try_var) (pattern and expression resp.)
let err_pat = self.pat_enum(sp, err_path, vec!(binding_pat));
let err_inner_expr = self.expr_call_ident(sp, err, vec!(binding_expr.clone()));
// return Err(__try_var)
let err_expr = self.expr(sp, ast::ExprRet(Some(err_inner_expr)));
// Ok(__try_var) => __try_var
let ok_arm = self.arm(sp, vec!(ok_pat), binding_expr);
// Err(__try_var) => return Err(__try_var)
let err_arm = self.arm(sp, vec!(err_pat), err_expr);
// match head { Ok() => ..., Err() => ... }
self.expr_match(sp, head, vec!(ok_arm, err_arm))
}
fn pat(&self, span: Span, pat: ast::Pat_) -> P<ast::Pat> {
P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span })
}
fn pat_wild(&self, span: Span) -> P<ast::Pat> {
self.pat(span, ast::PatWild(ast::PatWildSingle))
}
fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> {
self.pat(span, ast::PatLit(expr))
}
fn pat_ident(&self, span: Span, ident: ast::Ident) -> P<ast::Pat> {
self.pat_ident_binding_mode(span, ident, ast::BindByValue(ast::MutImmutable))
}
fn pat_ident_binding_mode(&self,
span: Span,
ident: ast::Ident,
bm: ast::BindingMode) -> P<ast::Pat> {
let pat = ast::PatIdent(bm, Spanned{span: span, node: ident}, None);
self.pat(span, pat)
}
fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
let pat = ast::PatEnum(path, Some(subpats));
self.pat(span, pat)
}
fn pat_struct(&self, span: Span,
path: ast::Path, field_pats: Vec<ast::FieldPat>) -> P<ast::Pat> {
let pat = ast::PatStruct(path, field_pats, false);
self.pat(span, pat)
}
fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
let pat = ast::PatTup(pats);
self.pat(span, pat)
}
fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("Some"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn pat_none(&self, span: Span) -> P<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("option"),
self.ident_of("None"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!())
}
fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Ok"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
let some = vec!(
self.ident_of("std"),
self.ident_of("result"),
self.ident_of("Err"));
let path = self.path_global(span, some);
self.pat_enum(span, path, vec!(pat))
}
fn arm(&self, _span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm {
ast::Arm {
attrs: vec!(),
pats: pats,
guard: None,
body: expr
}
}
fn arm_unreachable(&self, span: Span) -> ast::Arm {
self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span))
}
fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm>) -> P<Expr> {
self.expr(span, ast::ExprMatch(arg, arms))
}
fn expr_if(&self, span: Span, cond: P<ast::Expr>,
then: P<ast::Expr>, els: Option<P<ast::Expr>>) -> P<ast::Expr> {
let els = els.map(|x| self.expr_block(self.block_expr(x)));
self.expr(span, ast::ExprIf(cond, self.block_expr(then), els))
}
fn expr_loop(&self, span: Span, block: P<ast::Block>) -> P<ast::Expr> {
self.expr(span, ast::ExprLoop(block, None))
}
fn lambda_fn_decl(&self, span: Span,
fn_decl: P<ast::FnDecl>, blk: P<ast::Block>) -> P<ast::Expr> {
self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk))
}
fn lambda(&self, span: Span, ids: Vec<ast::Ident>, blk: P<ast::Block>) -> P<ast::Expr> {
let fn_decl = self.fn_decl(
ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(),
self.ty_infer(span));
self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk))
}
fn lambda0(&self, span: Span, blk: P<ast::Block>) -> P<ast::Expr> {
self.lambda(span, Vec::new(), blk)
}
fn lambda1(&self, span: Span, blk: P<ast::Block>, ident: ast::Ident) -> P<ast::Expr> {
self.lambda(span, vec!(ident), blk)
}
fn lambda_expr(&self, span: Span, ids: Vec<ast::Ident>,
expr: P<ast::Expr>) -> P<ast::Expr> {
self.lambda(span, ids, self.block_expr(expr))
}
fn lambda_expr_0(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
self.lambda0(span, self.block_expr(expr))
}
fn lambda_expr_1(&self, span: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
self.lambda1(span, self.block_expr(expr), ident)
}
fn lambda_stmts(&self,
span: Span,
ids: Vec<ast::Ident>,
stmts: Vec<P<ast::Stmt>>)
-> P<ast::Expr> {
self.lambda(span, ids, self.block(span, stmts, None))
}
fn lambda_stmts_0(&self, span: Span, stmts: Vec<P<ast::Stmt>>) -> P<ast::Expr> {
self.lambda0(span, self.block(span, stmts, None))
}
fn lambda_stmts_1(&self, span: Span, stmts: Vec<P<ast::Stmt>>,
ident: ast::Ident) -> P<ast::Expr> {
self.lambda1(span, self.block(span, stmts, None), ident)
}
fn arg(&self, span: Span, ident: ast::Ident, ty: P<ast::Ty>) -> ast::Arg {
let arg_pat = self.pat_ident(span, ident);
ast::Arg {
ty: ty,
pat: arg_pat,
id: ast::DUMMY_NODE_ID
}
}
// FIXME unused self
fn fn_decl(&self, inputs: Vec<ast::Arg> , output: P<ast::Ty>) -> P<ast::FnDecl> {
P(ast::FnDecl {
inputs: inputs,
output: output,
cf: ast::Return,
variadic: false
})
}
fn item(&self, span: Span, name: Ident,
attrs: Vec<ast::Attribute>, node: ast::Item_) -> P<ast::Item> {
// FIXME: Would be nice if our generated code didn't violate
// Rust coding conventions
P(ast::Item {
ident: name,
attrs: attrs,
id: ast::DUMMY_NODE_ID,
node: node,
vis: ast::Inherited,
span: span
})
}
fn item_fn_poly(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
generics: Generics,
body: P<ast::Block>) -> P<ast::Item> {
self.item(span,
name,
Vec::new(),
ast::ItemFn(self.fn_decl(inputs, output),
ast::NormalFn,
abi::Rust,
generics,
body))
}
fn item_fn(&self,
span: Span,
name: Ident,
inputs: Vec<ast::Arg> ,
output: P<ast::Ty>,
body: P<ast::Block>
) -> P<ast::Item> {
self.item_fn_poly(
span,
name,
inputs,
output,
ast_util::empty_generics(),
body)
}
fn variant(&self, span: Span, name: Ident, tys: Vec<P<ast::Ty>> ) -> ast::Variant {
let args = tys.into_iter().map(|ty| {
ast::VariantArg { ty: ty, id: ast::DUMMY_NODE_ID }
}).collect();
respan(span,
ast::Variant_ {
name: name,
attrs: Vec::new(),
kind: ast::TupleVariantKind(args),
id: ast::DUMMY_NODE_ID,
disr_expr: None,
vis: ast::Public
})
}
fn item_enum_poly(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef,
generics: Generics) -> P<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics))
}
fn item_enum(&self, span: Span, name: Ident,
enum_definition: ast::EnumDef) -> P<ast::Item> {
self.item_enum_poly(span, name, enum_definition,
ast_util::empty_generics())
}
fn item_struct(&self, span: Span, name: Ident,
struct_def: ast::StructDef) -> P<ast::Item> {
self.item_struct_poly(
span,
name,
struct_def,
ast_util::empty_generics()
)
}
fn item_struct_poly(&self, span: Span, name: Ident,
struct_def: ast::StructDef, generics: Generics) -> P<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemStruct(P(struct_def), generics))
}
fn item_mod(&self, span: Span, inner_span: Span, name: Ident,
attrs: Vec<ast::Attribute> ,
vi: Vec<ast::ViewItem> ,
items: Vec<P<ast::Item>> ) -> P<ast::Item> {
self.item(
span,
name,
attrs,
ast::ItemMod(ast::Mod {
inner: inner_span,
view_items: vi,
items: items,
})
)
}
fn item_static(&self,
span: Span,
name: Ident,
ty: P<ast::Ty>,
mutbl: ast::Mutability,
expr: P<ast::Expr>)
-> P<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemStatic(ty, mutbl, expr))
}
fn item_ty_poly(&self, span: Span, name: Ident, ty: P<ast::Ty>,
generics: Generics) -> P<ast::Item> {
self.item(span, name, Vec::new(), ast::ItemTy(ty, generics))
}
fn item_ty(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> P<ast::Item> {
self.item_ty_poly(span, name, ty, ast_util::empty_generics())
}
fn attribute(&self, sp: Span, mi: P<ast::MetaItem>) -> ast::Attribute {
respan(sp, ast::Attribute_ {
id: attr::mk_attr_id(),
style: ast::AttrOuter,
value: mi,
is_sugared_doc: false,
})
}
fn meta_word(&self, sp: Span, w: InternedString) -> P<ast::MetaItem> {
P(respan(sp, ast::MetaWord(w)))
}
fn meta_list(&self,
sp: Span,
name: InternedString,
mis: Vec<P<ast::MetaItem>> )
-> P<ast::MetaItem> {
P(respan(sp, ast::MetaList(name, mis)))
}
fn meta_name_value(&self,
sp: Span,
name: InternedString,
value: ast::Lit_)
-> P<ast::MetaItem> {
P(respan(sp, ast::MetaNameValue(name, respan(sp, value))))
}
fn view_use(&self, sp: Span,
vis: ast::Visibility, vp: P<ast::ViewPath>) -> ast::ViewItem {
ast::ViewItem {
node: ast::ViewItemUse(vp),
attrs: Vec::new(),
vis: vis,
span: sp
}
}
fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem {
let last = path.segments.last().unwrap().identifier;
self.view_use_simple_(sp, vis, last, path)
}
fn view_use_simple_(&self, sp: Span, vis: ast::Visibility,
ident: ast::Ident, path: ast::Path) -> ast::ViewItem {
self.view_use(sp, vis,
P(respan(sp,
ast::ViewPathSimple(ident,
path,
ast::DUMMY_NODE_ID))))
}
fn view_use_list(&self, sp: Span, vis: ast::Visibility,
path: Vec<ast::Ident> , imports: &[ast::Ident]) -> ast::ViewItem {
let imports = imports.iter().map(|id| {
respan(sp, ast::PathListIdent { name: *id, id: ast::DUMMY_NODE_ID })
}).collect();
self.view_use(sp, vis,
P(respan(sp,
ast::ViewPathList(self.path(sp, path),
imports,
ast::DUMMY_NODE_ID))))
}
fn view_use_glob(&self, sp: Span,
vis: ast::Visibility, path: Vec<ast::Ident> ) -> ast::ViewItem {
self.view_use(sp, vis,
P(respan(sp,
ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID))))
}
}<|fim▁end|> | pub use ext;
pub use parse; |
<|file_name|>extfmt-non-literal2.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your<|fim▁hole|>// error-pattern: literal
fn main() {
// fmt!'s first argument must be a literal. Hopefully this
// restriction can be eased eventually to just require a
// compile-time constant.
let x = fmt!(20);
}<|fim▁end|> | // option. This file may not be copied, modified, or distributed
// except according to those terms.
|
<|file_name|>main.js<|end_file_name|><|fim▁begin|>/*<|fim▁hole|>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
'use strict';
var xhr = new XMLHttpRequest();
xhr.open('GET', 'data.json');
xhr.onreadystatechange = function() {
if (xhr.readyState === 4 && xhr.status === 200) {
var data = JSON.parse(xhr.responseText);
document.querySelector('#data').innerHTML = JSON.stringify(data);
}
};
/*
// can do this in Chrome, Firefox, etc.:
xhr.onload = function(event) {
var data = JSON.parse(this.response);
document.querySelector('#data').innerHTML = JSON.stringify(data);
}
*/
xhr.send();<|fim▁end|> | Copyright 2017 Google Inc. |
<|file_name|>cron.go<|end_file_name|><|fim▁begin|>// Copyright 2014 The Gogs Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package cron
import (
"time"
"github.com/gogits/cron"
"github.com/gigforks/gogs/models"
"github.com/gigforks/gogs/modules/log"
"github.com/gigforks/gogs/modules/setting"
)
var c = cron.New()
func NewContext() {
var (
entry *cron.Entry
err error
)
if setting.Cron.UpdateMirror.Enabled {
entry, err = c.AddFunc("Update mirrors", setting.Cron.UpdateMirror.Schedule, models.MirrorUpdate)
if err != nil {
log.Fatal(4, "Cron[Update mirrors]: %v", err)
}
if setting.Cron.UpdateMirror.RunAtStart {
entry.Prev = time.Now()
entry.ExecTimes++<|fim▁hole|> go models.MirrorUpdate()
}
}
if setting.Cron.RepoHealthCheck.Enabled {
entry, err = c.AddFunc("Repository health check", setting.Cron.RepoHealthCheck.Schedule, models.GitFsck)
if err != nil {
log.Fatal(4, "Cron[Repository health check]: %v", err)
}
if setting.Cron.RepoHealthCheck.RunAtStart {
entry.Prev = time.Now()
entry.ExecTimes++
go models.GitFsck()
}
}
if setting.Cron.CheckRepoStats.Enabled {
entry, err = c.AddFunc("Check repository statistics", setting.Cron.CheckRepoStats.Schedule, models.CheckRepoStats)
if err != nil {
log.Fatal(4, "Cron[Check repository statistics]: %v", err)
}
if setting.Cron.CheckRepoStats.RunAtStart {
entry.Prev = time.Now()
entry.ExecTimes++
go models.CheckRepoStats()
}
}
c.Start()
}
// ListTasks returns all running cron tasks.
func ListTasks() []*cron.Entry {
return c.Entries()
}<|fim▁end|> | |
<|file_name|>task.js<|end_file_name|><|fim▁begin|>// 对字符串头尾进行空格字符的去除、包括全角半角空格、Tab等,返回一个字符串
function trim(str) {
var regex1 = /^\s*/;
var regex2 = /\s*$/;
return (str.replace(regex1, "")).replace(regex2, "");
}
// 给一个element绑定一个针对event事件的响应,响应函数为listener
function addEvent(element, event, listener, isCorrect) {
if (element.addEventListener) {
element.addEventListener(event, listener, isCorrect);
}
else if (element.attachEvent) {
element.attachEvent("on" + event, listener);
}
else {
element["on" + event] = listener;
}
}
var validate = {
//将name中的所有中文字符替换(1中文字符长度=2英文字符长度)
nameVali: function (str) {
var chineseRegex = /[\u4E00-\uFA29]|[\uE7C7-\uE7F3]/g;
var lenRegex = /^.{4,16}$/;
if (str.length == 0) {
return false;
}
else if (!lenRegex.test(str)) {
return false
}
else {
return true;
}
},
//密码验证
passwordVali: function (str) {
return (str.length >= 8 && str.length<= 20);
},
//再次输入的密码验证
repasswordVali: function (str, id) {
var password = document.querySelector("#" + id).value;
return (str === password);
},
// 判断是否为邮箱地址
// 第一部分:由字母、数字、下划线、短线“-”、点号“.”组成,
// 第二部分:为一个域名,域名由字母、数字、短线“-”、域名后缀组成,
// 而域名后缀一般为.xxx或.xxx.xx,一区的域名后缀一般为2-4位,如cn,com,net,现在域名有的也会大于4位
emailVali: function (str) {
var regex = /^([\w-*\.*]+)@([\w-]+)((\.[\w-]{2,4}){1,2})$/;
return regex.test(str);
},
// 判断是否为手机号
telephoneVali: function (str) {
var regex = /^1[0-9]{10}$/;
return regex.test(str);
},
allVali: function () {
var inputArray = document.querySelectorAll("input");
var count = 0;
for (var cur = 0; cur < inputArray.length; cur++) {
if (inputArray[cur].className == "correctInput") {
count++;
}
}
return (count === inputArray.length);
}
}
function formFactory(data) {
var whole = {
settings: {
label: data.label,
name: data.name,
type: data.type,
validator: data.validator,
rules: data.rules,
success: data.success,
empty: data.empty,
fail: data.fail
},
generateInput: function(type) {
var that = this;
var container = document.getElementById("father");
var span = document.createElement("span");
span.innerText = that.settings.label;
var p = document.createElement("p");
p.className = "status";
var label = document.createElement("label");
var input = document.createElement("input");
input.name = that.settings.name;
input.type = that.settings.type;
input.id = that.settings.name;
addEvent(input, "focus", function() {
input.className = "inputFocus";
p.innerText = that.settings.rules;
}, true);
addEvent(input, "blur", function() {
var verify = "";
if (type == "single") {
verify = that.settings.validator(this.value);
}
else if (type == "verify") {
verify = that.settings.validator(this.value) && (this.value.length != 0);
}
if (verify) {
input.className = "correctInput";
p.className = "status correctSta";
p.innerText = that.settings.success;
}
else {
input.className = "wrongInput";
p.className = "status wrongSta";
if (this.value.length == 0) {
p.innerText = that.settings.empty;
}
else p.innerText = that.settings.fail;
}
}, true);
container.appendChild(label);
label.appendChild(span);<|fim▁hole|> label.appendChild(input);
container.appendChild(p);
},
generateButton: function() {
var that = this;
var container = document.getElementById("father");
var button = document.createElement("button");
button.innerHTML = that.settings.label;
addEvent(button, "click", function() {
if (that.settings.validator()) {
alert("提交成功!");
}
else alert("提交失败!");
}, false);
container.appendChild(button);
},
init: function() {
var that = this;
//判断类型
switch (that.settings.name) {
case 'name':
that.generateInput('single');
break;
case 'password':
that.generateInput('single');
break;
case 'repassword':
that.generateInput('verify');
break;
case 'email':
that.generateInput('single');
break;
case 'telephone':
that.generateInput('single');
break;
case 'submit':
that.generateButton();
break;
}
}
}
return whole.init();
}
window.onload = function() {
for (var i = 0; i < data.length; i++) {
formFactory(data[i]);
}
}<|fim▁end|> | |
<|file_name|>saveData.py<|end_file_name|><|fim▁begin|>def saveData(X, f_out, colfmt='%i'):
'''
Quick alias for saving data matricies. If X and f_out are tuples,
this function will save multiple matricies at once.
'''
import numpy as np
if isinstance(X, tuple):
assert(len(X) == len(f_out))
for idx,Z in enumerate(X):
np.savetxt(f_out[idx], Z, delimiter=',', fmt=colfmt)
<|fim▁hole|><|fim▁end|> | else:
np.savetxt(f_out, X, delimiter=',', fmt=colfmt) |
<|file_name|>variables.py<|end_file_name|><|fim▁begin|>import sublime
import collections
VAR_MAP_LEADER = 'mapleader'
VAR_MAP_LOCAL_LEADER = 'maplocalleader'
# well-known variables
_SPECIAL_STRINGS = {
'<leader>': VAR_MAP_LEADER,
'<localleader>': VAR_MAP_LOCAL_LEADER,
}
_DEFAULTS = {
VAR_MAP_LEADER: '\\',
VAR_MAP_LOCAL_LEADER: '\\'
}
_VARIABLES = {
}
def expand_keys(seq):
'''Replaces well-known variables in key names with their corresponding
values.
'''
leader = var_name = None
# TODO(guillermooo): Can these variables appear in the middle of a
# sequence instead of at the beginning only?
if seq.lower().startswith('<leader>'):
var_name = '<leader>'
leader = _VARIABLES.get('mapleader', _DEFAULTS.get('mapleader'))
if seq.lower().startswith('<localleader>'):
var = '<localleader>'<|fim▁hole|> _DEFAULTS.get('maplocalleader'))
try:
return leader + seq[len(var_name):]
except TypeError:
return seq
def is_key_name(name):
return name.lower() in _SPECIAL_STRINGS
def get(name):
name = name.lower()
name = _SPECIAL_STRINGS.get(name, name)
return _VARIABLES.get(name, _DEFAULTS.get(name))
def set_(name, value):
# TODO(guillermooo): Set vars in settings.
_VARIABLES[name] = value
class Variables(object):
'''Stores variables during the current Sublime Text session.
Meant to be used as a descriptor with `State`.
'''
def __get__(self, instance, owner):
self.view = instance.view
self.settings = instance.settings
return self
def get(self, name):
return get(name)
def set(self, name, value):
return set_(name, value)<|fim▁end|> | local_leader = _VARIABLES.get('maplocalleader', |
<|file_name|>bitcoin_es_UY.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="es_UY" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>SweetStake</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2015 The SweetStake developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Doble clic para editar etiqueta o dirección </translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Crear una nueva dirección </translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles del sistema</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your SweetStake addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Direccion </translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(Sin etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Escriba la contraseña</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Repetir nueva contraseña</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Introduzca la nueva contraseña para el monedero. <br/> Utilice una contraseña de <b> 10 o más caracteres al azar </ b>, o <b> ocho o más palabras </ b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Monedero cifrado</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operacion necesita la contraseña del monedero para desbloquear el mismo</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Monedero destrabado</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operacion necesita la contraseña del monedero para descifrar el mismo</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Monedero descifrado</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Cambiar contraseña</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Ingrese la contraseña anterior y la nueva de acceso a el monedero</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Confirme el cifrado del monedero</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Monedero cifrado</translation>
</message>
<message>
<location line="-58"/>
<source>SweetStake will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Fallo en el cifrado del monedero</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Fallo en el cifrado del monedero a causa de un error interno. Su monedero no esta cifrado</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Las contraseñas suministradas no coinciden.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Fallo en el desbloqueo del mondero</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contraseña introducida para el descifrado del monedero es incorrecta.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Fallo en el descifrado del monedero</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+282"/>
<source>Sign &message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<location line="-319"/>
<source>&Overview</source>
<translation>&Vista previa</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Mostrar descripción general del monedero</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&transaciones </translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Buscar en el historial de transacciones</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Salir de la aplicacion </translation>
</message>
<message>
<location line="+6"/>
<source>Show information about SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Opciones...</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+259"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-256"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Send coins to a SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Modify configuration options for SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambie la clave utilizada para el cifrado del monedero</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-202"/>
<source>SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+180"/>
<source>&About SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Configuracion </translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Ayuda</translation>
</message>
<message>
<location line="+12"/>
<source>Tabs toolbar</source>
<translation>Barra de herramientas</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[prueba_de_red]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>SweetStake client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+75"/>
<source>%n active connection(s) to SweetStake network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-312"/>
<source>About SweetStake card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show information about SweetStake card</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+297"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>A la fecha</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Ponerse al dia...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Transaccion enviada</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid SweetStake address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>El Monedero esta <b>cifrado</b> y actualmente <b>desbloqueado</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>El Monedero esta <b>cifrado</b> y actualmente <b>bloqueado</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. SweetStake can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Direccion </translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(Sin etiqueta)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Direccion </translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Nueva dirección de recepción </translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nueva dirección de envío </translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Editar dirección de recepcion </translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Editar dirección de envío </translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>La dirección introducida "% 1" ya está en la libreta de direcciones.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid SweetStake address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>No se puede abrir el monedero.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Fallo en la nueva clave generada.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>sweetstake-qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start SweetStake after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start SweetStake on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Automatically open the SweetStake client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Connect to the SweetStake network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting SweetStake.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Whether to show SweetStake addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting SweetStake.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Formulario</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the SweetStake network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Transacciones recientes</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the sweetstake-qt help message to get a list with possible SweetStake command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>SweetStake - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>SweetStake Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the SweetStake debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the SweetStake RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Enviar a varios destinatarios a la vez</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 hack</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Confirmar el envío</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a SweetStake address (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Confirmar el envio de monedas</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>La cantidad a pagar debe ser mayor que 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(Sin etiqueta)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>A&Monto:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>Pagar &A:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introduzca una etiqueta para esta dirección para añadirla a su libreta de direcciones</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Pegar la dirección desde el portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a SweetStake address (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Pegar la dirección desde el portapapeles</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified SweetStake address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a SweetStake address (e.g. ShuwefdniqmkeigyesHfegoeEgwqgNzknW)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter SweetStake signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Abrir hasta %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 510 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>desconocido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Direccion </translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Abrir hasta %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Direccion </translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>SweetStake version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or sweetstaked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: SweetStake.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: sweetstaked.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 15714 or testnet: 25714)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 15715 or testnet: 25715)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong SweetStake will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message><|fim▁hole|> </message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=SweetStakerpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "SweetStake Alert" [email protected]
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. SweetStake is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of SweetStake</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart SweetStake to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. SweetStake is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | <message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation type="unfinished"/> |
<|file_name|>service_worker_database_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "content/browser/service_worker/service_worker_database.h"
#include <string>
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "content/browser/service_worker/service_worker_database.pb.h"
#include "content/common/service_worker/service_worker_types.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/leveldatabase/src/include/leveldb/write_batch.h"
namespace content {
namespace {
typedef ServiceWorkerDatabase::RegistrationData RegistrationData;
typedef ServiceWorkerDatabase::ResourceRecord Resource;
struct AvailableIds {
int64 reg_id;
int64 res_id;
int64 ver_id;
AvailableIds() : reg_id(-1), res_id(-1), ver_id(-1) {}
~AvailableIds() {}
};
GURL URL(const GURL& origin, const std::string& path) {
EXPECT_TRUE(origin.is_valid());
EXPECT_EQ(origin, origin.GetOrigin());
GURL out(origin.spec() + path);
EXPECT_TRUE(out.is_valid());
return out;
}
Resource CreateResource(int64 resource_id, const GURL& url, uint64 size_bytes) {
EXPECT_TRUE(url.is_valid());
return Resource(resource_id, url, size_bytes);
}
ServiceWorkerDatabase* CreateDatabase(const base::FilePath& path) {
return new ServiceWorkerDatabase(path);
}
ServiceWorkerDatabase* CreateDatabaseInMemory() {
return new ServiceWorkerDatabase(base::FilePath());
}
void VerifyRegistrationData(const RegistrationData& expected,
const RegistrationData& actual) {
EXPECT_EQ(expected.registration_id, actual.registration_id);
EXPECT_EQ(expected.scope, actual.scope);
EXPECT_EQ(expected.script, actual.script);
EXPECT_EQ(expected.version_id, actual.version_id);
EXPECT_EQ(expected.is_active, actual.is_active);
EXPECT_EQ(expected.has_fetch_handler, actual.has_fetch_handler);
EXPECT_EQ(expected.last_update_check, actual.last_update_check);
EXPECT_EQ(expected.resources_total_size_bytes,
actual.resources_total_size_bytes);
}
void VerifyResourceRecords(const std::vector<Resource>& expected,
const std::vector<Resource>& actual) {
ASSERT_EQ(expected.size(), actual.size());
for (size_t i = 0; i < expected.size(); ++i) {
EXPECT_EQ(expected[i].resource_id, actual[i].resource_id);
EXPECT_EQ(expected[i].url, actual[i].url);
EXPECT_EQ(expected[i].size_bytes, actual[i].size_bytes);
}
}
} // namespace
TEST(ServiceWorkerDatabaseTest, OpenDatabase) {
base::ScopedTempDir database_dir;
ASSERT_TRUE(database_dir.CreateUniqueTempDir());
scoped_ptr<ServiceWorkerDatabase> database(
CreateDatabase(database_dir.path()));
// Should be false because the database does not exist at the path.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->LazyOpen(false));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
database.reset(CreateDatabase(database_dir.path()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(false));
}
TEST(ServiceWorkerDatabaseTest, OpenDatabase_InMemory) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
// Should be false because the database does not exist in memory.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->LazyOpen(false));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
database.reset(CreateDatabaseInMemory());
// Should be false because the database is not persistent.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->LazyOpen(false));
}
TEST(ServiceWorkerDatabaseTest, DatabaseVersion) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
// Opening a new database does not write anything, so its schema version
// should be 0.
int64 db_version = -1;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadDatabaseVersion(&db_version));
EXPECT_EQ(0u, db_version);
// First writing triggers database initialization and bumps the schema
// version.
std::vector<ServiceWorkerDatabase::ResourceRecord> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
ServiceWorkerDatabase::RegistrationData data;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data, resources, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadDatabaseVersion(&db_version));
EXPECT_LT(0, db_version);
}
TEST(ServiceWorkerDatabaseTest, UpgradeSchemaToVersion2) {
base::ScopedTempDir database_dir;
ASSERT_TRUE(database_dir.CreateUniqueTempDir());
scoped_ptr<ServiceWorkerDatabase> database(
CreateDatabase(database_dir.path()));
GURL origin("http://example.com");
// Add a registration to the database.
std::vector<ServiceWorkerDatabase::ResourceRecord> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
ServiceWorkerDatabase::RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script1.js");
data.version_id = 200;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(data, resources, &deleted_version,<|fim▁hole|> // Sanity check on current version.
int64 db_version = -1;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadDatabaseVersion(&db_version));
EXPECT_LE(2, db_version);
// Now delete the data that will be created in an upgrade to schema version 2,
// and reset the schema version to 1.
leveldb::WriteBatch batch;
batch.Delete("REGID_TO_ORIGIN:" + base::Int64ToString(data.registration_id));
batch.Put("INITDATA_DB_VERSION", base::Int64ToString(1));
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->WriteBatch(&batch));
// Make sure correct data got deleted.
GURL origin_out;
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(data.registration_id, &origin_out));
// Close and reopen the database to verify the schema got updated.
database.reset(CreateDatabase(database_dir.path()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
// Verify version number.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadDatabaseVersion(&db_version));
EXPECT_LE(2, db_version);
// And check that looking up origin for registration works.
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data.registration_id, &origin_out));
EXPECT_EQ(origin, origin_out);
}
TEST(ServiceWorkerDatabaseTest, GetNextAvailableIds) {
base::ScopedTempDir database_dir;
ASSERT_TRUE(database_dir.CreateUniqueTempDir());
scoped_ptr<ServiceWorkerDatabase> database(
CreateDatabase(database_dir.path()));
GURL origin("http://example.com");
// The database has never been used, so returns initial values.
AvailableIds ids;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(
&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(0, ids.reg_id);
EXPECT_EQ(0, ids.ver_id);
EXPECT_EQ(0, ids.res_id);
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(
&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(0, ids.reg_id);
EXPECT_EQ(0, ids.ver_id);
EXPECT_EQ(0, ids.res_id);
// Writing a registration bumps the next available ids.
std::vector<Resource> resources;
RegistrationData data1;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
data1.registration_id = 100;
data1.scope = URL(origin, "/foo");
data1.script = URL(origin, "/script1.js");
data1.version_id = 200;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(
&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(101, ids.reg_id);
EXPECT_EQ(201, ids.ver_id);
EXPECT_EQ(0, ids.res_id);
// Writing uncommitted resources bumps the next available id.
const int64 kUncommittedIds[] = {0, 1, 3, 5, 6, 10};
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(std::set<int64>(
kUncommittedIds, kUncommittedIds + arraysize(kUncommittedIds))));
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->GetNextAvailableIds(&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(101, ids.reg_id);
EXPECT_EQ(201, ids.ver_id);
EXPECT_EQ(11, ids.res_id);
// Writing purgeable resources bumps the next available id.
const int64 kPurgeableIds[] = {4, 12, 16, 17, 20};
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(std::set<int64>(
kPurgeableIds, kPurgeableIds + arraysize(kPurgeableIds))));
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->GetNextAvailableIds(&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(101, ids.reg_id);
EXPECT_EQ(201, ids.ver_id);
EXPECT_EQ(21, ids.res_id);
// Writing a registration whose ids are lower than the stored ones should not
// bump the next available ids.
RegistrationData data2;
data2.registration_id = 10;
data2.scope = URL(origin, "/bar");
data2.script = URL(origin, "/script2.js");
data2.version_id = 20;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
// Same with resources.
int64 kLowResourceId = 15;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(
std::set<int64>(&kLowResourceId, &kLowResourceId + 1)));
// Close and reopen the database to verify the stored values.
database.reset(CreateDatabase(database_dir.path()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->GetNextAvailableIds(
&ids.reg_id, &ids.ver_id, &ids.res_id));
EXPECT_EQ(101, ids.reg_id);
EXPECT_EQ(201, ids.ver_id);
EXPECT_EQ(21, ids.res_id);
}
TEST(ServiceWorkerDatabaseTest, GetOriginsWithRegistrations) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
std::set<GURL> origins;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetOriginsWithRegistrations(&origins));
EXPECT_TRUE(origins.empty());
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
GURL origin1("http://example.com");
RegistrationData data1;
data1.registration_id = 123;
data1.scope = URL(origin1, "/foo");
data1.script = URL(origin1, "/script1.js");
data1.version_id = 456;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
GURL origin2("https://www.example.com");
RegistrationData data2;
data2.registration_id = 234;
data2.scope = URL(origin2, "/bar");
data2.script = URL(origin2, "/script2.js");
data2.version_id = 567;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
GURL origin3("https://example.org");
RegistrationData data3;
data3.registration_id = 345;
data3.scope = URL(origin3, "/hoge");
data3.script = URL(origin3, "/script3.js");
data3.version_id = 678;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data3, resources, &deleted_version, &newly_purgeable_resources));
// |origin3| has two registrations.
RegistrationData data4;
data4.registration_id = 456;
data4.scope = URL(origin3, "/fuga");
data4.script = URL(origin3, "/script4.js");
data4.version_id = 789;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data4, resources, &deleted_version, &newly_purgeable_resources));
origins.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetOriginsWithRegistrations(&origins));
EXPECT_EQ(3U, origins.size());
EXPECT_TRUE(ContainsKey(origins, origin1));
EXPECT_TRUE(ContainsKey(origins, origin2));
EXPECT_TRUE(ContainsKey(origins, origin3));
// |origin3| has another registration, so should not remove it from the
// unique origin list.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data4.registration_id,
origin3,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data4.registration_id, deleted_version.registration_id);
origins.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetOriginsWithRegistrations(&origins));
EXPECT_EQ(3U, origins.size());
EXPECT_TRUE(ContainsKey(origins, origin1));
EXPECT_TRUE(ContainsKey(origins, origin2));
EXPECT_TRUE(ContainsKey(origins, origin3));
// |origin3| should be removed from the unique origin list.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data3.registration_id,
origin3,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data3.registration_id, deleted_version.registration_id);
origins.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetOriginsWithRegistrations(&origins));
EXPECT_EQ(2U, origins.size());
EXPECT_TRUE(ContainsKey(origins, origin1));
EXPECT_TRUE(ContainsKey(origins, origin2));
}
TEST(ServiceWorkerDatabaseTest, GetRegistrationsForOrigin) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin1("http://example.com");
GURL origin2("https://www.example.com");
GURL origin3("https://example.org");
std::vector<RegistrationData> registrations;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetRegistrationsForOrigin(origin1, ®istrations));
EXPECT_TRUE(registrations.empty());
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
RegistrationData data1;
data1.registration_id = 100;
data1.scope = URL(origin1, "/foo");
data1.script = URL(origin1, "/script1.js");
data1.version_id = 1000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
RegistrationData data2;
data2.registration_id = 200;
data2.scope = URL(origin2, "/bar");
data2.script = URL(origin2, "/script2.js");
data2.version_id = 2000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
RegistrationData data3;
data3.registration_id = 300;
data3.scope = URL(origin3, "/hoge");
data3.script = URL(origin3, "/script3.js");
data3.version_id = 3000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data3, resources, &deleted_version, &newly_purgeable_resources));
// |origin3| has two registrations.
RegistrationData data4;
data4.registration_id = 400;
data4.scope = URL(origin3, "/fuga");
data4.script = URL(origin3, "/script4.js");
data4.version_id = 4000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data4, resources, &deleted_version, &newly_purgeable_resources));
registrations.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetRegistrationsForOrigin(origin3, ®istrations));
EXPECT_EQ(2U, registrations.size());
VerifyRegistrationData(data3, registrations[0]);
VerifyRegistrationData(data4, registrations[1]);
}
TEST(ServiceWorkerDatabaseTest, GetAllRegistrations) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
std::vector<RegistrationData> registrations;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetAllRegistrations(®istrations));
EXPECT_TRUE(registrations.empty());
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
GURL origin1("http://www1.example.com");
RegistrationData data1;
data1.registration_id = 100;
data1.scope = URL(origin1, "/foo");
data1.script = URL(origin1, "/script1.js");
data1.version_id = 1000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
GURL origin2("http://www2.example.com");
RegistrationData data2;
data2.registration_id = 200;
data2.scope = URL(origin2, "/bar");
data2.script = URL(origin2, "/script2.js");
data2.version_id = 2000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
GURL origin3("http://www3.example.com");
RegistrationData data3;
data3.registration_id = 300;
data3.scope = URL(origin3, "/hoge");
data3.script = URL(origin3, "/script3.js");
data3.version_id = 3000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data3, resources, &deleted_version, &newly_purgeable_resources));
// |origin3| has two registrations.
RegistrationData data4;
data4.registration_id = 400;
data4.scope = URL(origin3, "/fuga");
data4.script = URL(origin3, "/script4.js");
data4.version_id = 4000;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data4, resources, &deleted_version, &newly_purgeable_resources));
registrations.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetAllRegistrations(®istrations));
EXPECT_EQ(4U, registrations.size());
VerifyRegistrationData(data1, registrations[0]);
VerifyRegistrationData(data2, registrations[1]);
VerifyRegistrationData(data3, registrations[2]);
VerifyRegistrationData(data4, registrations[3]);
}
TEST(ServiceWorkerDatabaseTest, Registration_Basic) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script.js");
data.version_id = 200;
data.resources_total_size_bytes = 10939 + 200;
std::vector<Resource> resources;
resources.push_back(CreateResource(1, URL(origin, "/resource1"), 10939));
resources.push_back(CreateResource(2, URL(origin, "/resource2"), 200));
// Write a resource to the uncommitted list to make sure that writing
// registration removes resource ids associated with the registration from
// the uncommitted list.
std::set<int64> uncommitted_ids;
uncommitted_ids.insert(resources[0].resource_id);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(uncommitted_ids));
std::set<int64> uncommitted_ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetUncommittedResourceIds(&uncommitted_ids_out));
EXPECT_EQ(uncommitted_ids, uncommitted_ids_out);
ServiceWorkerDatabase::RegistrationData deleted_version;
deleted_version.version_id = 222; // Dummy initial value
std::vector<int64> newly_purgeable_resources;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data, resources, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
// Make sure that the registration and resource records are stored.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data, data_out);
VerifyResourceRecords(resources, resources_out);
GURL origin_out;
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data.registration_id, &origin_out));
EXPECT_EQ(origin, origin_out);
// Make sure that the resource is removed from the uncommitted list.
uncommitted_ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetUncommittedResourceIds(&uncommitted_ids_out));
EXPECT_TRUE(uncommitted_ids_out.empty());
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data.registration_id,
origin,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data.version_id, deleted_version.version_id);
ASSERT_EQ(resources.size(), newly_purgeable_resources.size());
for (size_t i = 0; i < resources.size(); ++i)
EXPECT_EQ(newly_purgeable_resources[i], resources[i].resource_id);
// Make sure that the registration and resource records are gone.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
EXPECT_TRUE(resources_out.empty());
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(data.registration_id, &origin_out));
// Resources should be purgeable because these are no longer referred.
std::set<int64> purgeable_ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&purgeable_ids_out));
EXPECT_EQ(2u, purgeable_ids_out.size());
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources[0].resource_id));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources[1].resource_id));
}
TEST(ServiceWorkerDatabaseTest, DeleteNonExistentRegistration) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script.js");
data.version_id = 200;
data.resources_total_size_bytes = 19 + 29129;
std::vector<Resource> resources;
resources.push_back(CreateResource(1, URL(origin, "/resource1"), 19));
resources.push_back(CreateResource(2, URL(origin, "/resource2"), 29129));
const int64 kNonExistentRegistrationId = 999;
const int64 kArbitraryVersionId = 222; // Used as a dummy initial value
ServiceWorkerDatabase::RegistrationData deleted_version;
deleted_version.version_id = kArbitraryVersionId;
std::vector<int64> newly_purgeable_resources;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data, resources, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
// Delete from an origin that has a registration.
deleted_version.version_id = kArbitraryVersionId;
newly_purgeable_resources.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(kNonExistentRegistrationId,
origin,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
// Delete from an origin that has no registration.
deleted_version.version_id = kArbitraryVersionId;
newly_purgeable_resources.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(kNonExistentRegistrationId,
GURL("http://example.net"),
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
}
TEST(ServiceWorkerDatabaseTest, Registration_Overwrite) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script.js");
data.version_id = 200;
data.resources_total_size_bytes = 10 + 11;
std::vector<Resource> resources1;
resources1.push_back(CreateResource(1, URL(origin, "/resource1"), 10));
resources1.push_back(CreateResource(2, URL(origin, "/resource2"), 11));
ServiceWorkerDatabase::RegistrationData deleted_version;
deleted_version.version_id = 222; // Dummy inital value
std::vector<int64> newly_purgeable_resources;
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data, resources1, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
// Make sure that the registration and resource records are stored.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data, data_out);
VerifyResourceRecords(resources1, resources_out);
// Update the registration.
RegistrationData updated_data = data;
updated_data.version_id = data.version_id + 1;
updated_data.resources_total_size_bytes = 12 + 13;
std::vector<Resource> resources2;
resources2.push_back(CreateResource(3, URL(origin, "/resource3"), 12));
resources2.push_back(CreateResource(4, URL(origin, "/resource4"), 13));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(updated_data,
resources2,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data.version_id, deleted_version.version_id);
ASSERT_EQ(resources1.size(), newly_purgeable_resources.size());
for (size_t i = 0; i < resources1.size(); ++i)
EXPECT_EQ(newly_purgeable_resources[i], resources1[i].resource_id);
// Make sure that |updated_data| is stored and resources referred from |data|
// is moved to the purgeable list.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
updated_data.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(updated_data, data_out);
VerifyResourceRecords(resources2, resources_out);
std::set<int64> purgeable_ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&purgeable_ids_out));
EXPECT_EQ(2u, purgeable_ids_out.size());
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[0].resource_id));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[1].resource_id));
}
TEST(ServiceWorkerDatabaseTest, Registration_Multiple) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
// Add registration1.
RegistrationData data1;
data1.registration_id = 100;
data1.scope = URL(origin, "/foo");
data1.script = URL(origin, "/script1.js");
data1.version_id = 200;
data1.resources_total_size_bytes = 1451 + 15234;
std::vector<Resource> resources1;
resources1.push_back(CreateResource(1, URL(origin, "/resource1"), 1451));
resources1.push_back(CreateResource(2, URL(origin, "/resource2"), 15234));
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources1, &deleted_version, &newly_purgeable_resources));
// Add registration2.
RegistrationData data2;
data2.registration_id = 101;
data2.scope = URL(origin, "/bar");
data2.script = URL(origin, "/script2.js");
data2.version_id = 201;
data2.resources_total_size_bytes = 5 + 6;
std::vector<Resource> resources2;
resources2.push_back(CreateResource(3, URL(origin, "/resource3"), 5));
resources2.push_back(CreateResource(4, URL(origin, "/resource4"), 6));
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources2, &deleted_version, &newly_purgeable_resources));
// Make sure that registration1 is stored.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
data1.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data1, data_out);
VerifyResourceRecords(resources1, resources_out);
GURL origin_out;
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data1.registration_id, &origin_out));
EXPECT_EQ(origin, origin_out);
// Make sure that registration2 is also stored.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
data2.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data2, data_out);
VerifyResourceRecords(resources2, resources_out);
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data2.registration_id, &origin_out));
EXPECT_EQ(origin, origin_out);
std::set<int64> purgeable_ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&purgeable_ids_out));
EXPECT_TRUE(purgeable_ids_out.empty());
// Delete registration1.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data1.registration_id,
origin,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data1.registration_id, deleted_version.registration_id);
// Make sure that registration1 is gone.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistration(
data1.registration_id, origin, &data_out, &resources_out));
EXPECT_TRUE(resources_out.empty());
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(data1.registration_id, &origin_out));
purgeable_ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&purgeable_ids_out));
EXPECT_EQ(2u, purgeable_ids_out.size());
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[0].resource_id));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, resources1[1].resource_id));
// Make sure that registration2 is still alive.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
data2.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data2, data_out);
VerifyResourceRecords(resources2, resources_out);
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data2.registration_id, &origin_out));
EXPECT_EQ(origin, origin_out);
}
TEST(ServiceWorkerDatabaseTest, Registration_UninitializedDatabase) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
const GURL origin("http://example.com");
// Should be failed because the database does not exist.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistration(
100, origin, &data_out, &resources_out));
EXPECT_EQ(kInvalidServiceWorkerRegistrationId, data_out.registration_id);
EXPECT_TRUE(resources_out.empty());
GURL origin_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(100, &origin_out));
// Deleting non-existent registration should succeed.
RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(
100, origin, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
// Actually create a new database, but not initialized yet.
database->LazyOpen(true);
// Should be failed because the database is not initialized.
ASSERT_EQ(ServiceWorkerDatabase::UNINITIALIZED, database->state_);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistration(
100, origin, &data_out, &resources_out));
EXPECT_EQ(kInvalidServiceWorkerRegistrationId, data_out.registration_id);
EXPECT_TRUE(resources_out.empty());
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(100, &origin_out));
// Deleting non-existent registration should succeed.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(
100, origin, &deleted_version, &newly_purgeable_resources));
EXPECT_EQ(kInvalidServiceWorkerVersionId, deleted_version.version_id);
EXPECT_TRUE(newly_purgeable_resources.empty());
}
TEST(ServiceWorkerDatabaseTest, UserData_Basic) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
const GURL kOrigin("http://example.com");
// Add a registration.
RegistrationData data;
data.registration_id = 100;
data.scope = URL(kOrigin, "/foo");
data.script = URL(kOrigin, "/script.js");
data.version_id = 200;
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data, resources, &deleted_version, &newly_purgeable_resources));
// Write user data associated with the stored registration.
std::string user_data_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data.registration_id, kOrigin, "key1", "data"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data.registration_id, "key1", &user_data_out));
EXPECT_EQ("data", user_data_out);
// Writing user data not associated with the stored registration should be
// failed.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->WriteUserData(300, kOrigin, "key1", "data"));
// Write empty user data for a different key.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data.registration_id, kOrigin, "key2", std::string()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data.registration_id, "key2", &user_data_out));
EXPECT_EQ(std::string(), user_data_out);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data.registration_id, "key1", &user_data_out));
EXPECT_EQ("data", user_data_out);
// Overwrite the existing user data.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data.registration_id, kOrigin, "key1", "overwrite"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data.registration_id, "key1", &user_data_out));
EXPECT_EQ("overwrite", user_data_out);
// Delete the user data.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteUserData(data.registration_id, "key1"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data.registration_id, "key1", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data.registration_id, "key2", &user_data_out));
EXPECT_EQ(std::string(), user_data_out);
}
TEST(ServiceWorkerDatabaseTest, UserData_DataIsolation) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
const GURL kOrigin("http://example.com");
// Add registration 1.
RegistrationData data1;
data1.registration_id = 100;
data1.scope = URL(kOrigin, "/foo");
data1.script = URL(kOrigin, "/script1.js");
data1.version_id = 200;
// Add registration 2.
RegistrationData data2;
data2.registration_id = 101;
data2.scope = URL(kOrigin, "/bar");
data2.script = URL(kOrigin, "/script2.js");
data2.version_id = 201;
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
// Write user data associated with the registration1.
std::string user_data_out;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data1.registration_id, kOrigin, "key", "data1"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data1.registration_id, "key", &user_data_out));
EXPECT_EQ("data1", user_data_out);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data2.registration_id, "key", &user_data_out));
// Write user data associated with the registration2. This shouldn't overwrite
// the data associated with registration1.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data2.registration_id, kOrigin, "key", "data2"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data1.registration_id, "key", &user_data_out));
EXPECT_EQ("data1", user_data_out);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data2.registration_id, "key", &user_data_out));
EXPECT_EQ("data2", user_data_out);
// Get all registrations with user data.
std::vector<std::pair<int64, std::string>> user_data_list;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserDataForAllRegistrations("key", &user_data_list));
EXPECT_EQ(2u, user_data_list.size());
EXPECT_EQ(data1.registration_id, user_data_list[0].first);
EXPECT_EQ("data1", user_data_list[0].second);
EXPECT_EQ(data2.registration_id, user_data_list[1].first);
EXPECT_EQ("data2", user_data_list[1].second);
// Delete the data associated with the registration2. This shouldn't delete
// the data associated with registration1.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteUserData(data2.registration_id, "key"));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data1.registration_id, "key", &user_data_out));
EXPECT_EQ("data1", user_data_out);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data2.registration_id, "key", &user_data_out));
// And again get all registrations with user data.
user_data_list.clear();
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserDataForAllRegistrations("key", &user_data_list));
EXPECT_EQ(1u, user_data_list.size());
EXPECT_EQ(data1.registration_id, user_data_list[0].first);
EXPECT_EQ("data1", user_data_list[0].second);
}
TEST(ServiceWorkerDatabaseTest, UserData_DeleteRegistration) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
const GURL kOrigin("http://example.com");
// Add registration 1.
RegistrationData data1;
data1.registration_id = 100;
data1.scope = URL(kOrigin, "/foo");
data1.script = URL(kOrigin, "/script1.js");
data1.version_id = 200;
// Add registration 2.
RegistrationData data2;
data2.registration_id = 101;
data2.scope = URL(kOrigin, "/bar");
data2.script = URL(kOrigin, "/script2.js");
data2.version_id = 201;
std::vector<Resource> resources;
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources, &deleted_version, &newly_purgeable_resources));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources, &deleted_version, &newly_purgeable_resources));
// Write user data associated with the registration1.
std::string user_data_out;
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data1.registration_id, kOrigin, "key1", "data1"));
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data1.registration_id, kOrigin, "key2", "data2"));
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data1.registration_id, "key1", &user_data_out));
ASSERT_EQ("data1", user_data_out);
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data1.registration_id, "key2", &user_data_out));
ASSERT_EQ("data2", user_data_out);
// Write user data associated with the registration2.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data2.registration_id, kOrigin, "key3", "data3"));
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data2.registration_id, "key3", &user_data_out));
ASSERT_EQ("data3", user_data_out);
// Delete all data associated with the registration1. This shouldn't delete
// the data associated with registration2.
ASSERT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(
data1.registration_id, kOrigin,
&deleted_version, &newly_purgeable_resources));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data1.registration_id, "key1", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data1.registration_id, "key2", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data2.registration_id, "key3", &user_data_out));
EXPECT_EQ("data3", user_data_out);
}
TEST(ServiceWorkerDatabaseTest, UserData_UninitializedDatabase) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
const GURL kOrigin("http://example.com");
// Should be failed because the database does not exist.
std::string user_data_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(100, "key", &user_data_out));
// Should be failed because the associated registration does not exist.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->WriteUserData(100, kOrigin, "key", "data"));
// Deleting non-existent entry should succeed.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteUserData(100, "key"));
// Actually create a new database, but not initialized yet.
database->LazyOpen(true);
// Should be failed because the database is not initialized.
ASSERT_EQ(ServiceWorkerDatabase::UNINITIALIZED, database->state_);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(100, "key", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->WriteUserData(100, kOrigin, "key", "data"));
// Deleting non-existent entry should succeed.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteUserData(100, "key"));
}
TEST(ServiceWorkerDatabaseTest, UpdateVersionToActive) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
// Should be false because a registration does not exist.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->UpdateVersionToActive(0, origin));
// Add a registration.
RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script.js");
data.version_id = 200;
data.is_active = false;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(data,
std::vector<Resource>(),
&deleted_version,
&newly_purgeable_resources));
// Make sure that the registration is stored.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data, data_out);
EXPECT_TRUE(resources_out.empty());
// Activate the registration.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->UpdateVersionToActive(data.registration_id, origin));
// Make sure that the registration is activated.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
RegistrationData expected_data = data;
expected_data.is_active = true;
VerifyRegistrationData(expected_data, data_out);
EXPECT_TRUE(resources_out.empty());
// Delete the registration.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data.registration_id,
origin,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data.registration_id, deleted_version.registration_id);
// Should be false because the registration is gone.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->UpdateVersionToActive(data.registration_id, origin));
}
TEST(ServiceWorkerDatabaseTest, UpdateLastCheckTime) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
GURL origin("http://example.com");
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
// Should be false because a registration does not exist.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->UpdateLastCheckTime(0, origin, base::Time::Now()));
// Add a registration.
RegistrationData data;
data.registration_id = 100;
data.scope = URL(origin, "/foo");
data.script = URL(origin, "/script.js");
data.version_id = 200;
data.last_update_check = base::Time::Now();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(data,
std::vector<Resource>(),
&deleted_version,
&newly_purgeable_resources));
// Make sure that the registration is stored.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
VerifyRegistrationData(data, data_out);
EXPECT_TRUE(resources_out.empty());
// Update the last check time.
base::Time updated_time = base::Time::Now();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->UpdateLastCheckTime(
data.registration_id, origin, updated_time));
// Make sure that the registration is updated.
resources_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistration(
data.registration_id, origin, &data_out, &resources_out));
RegistrationData expected_data = data;
expected_data.last_update_check = updated_time;
VerifyRegistrationData(expected_data, data_out);
EXPECT_TRUE(resources_out.empty());
// Delete the registration.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteRegistration(data.registration_id,
origin,
&deleted_version,
&newly_purgeable_resources));
EXPECT_EQ(data.registration_id, deleted_version.registration_id);
// Should be false because the registration is gone.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->UpdateLastCheckTime(
data.registration_id, origin, base::Time::Now()));
}
TEST(ServiceWorkerDatabaseTest, UncommittedResourceIds) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
// Write {1, 2, 3}.
std::set<int64> ids1;
ids1.insert(1);
ids1.insert(2);
ids1.insert(3);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(ids1));
std::set<int64> ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetUncommittedResourceIds(&ids_out));
EXPECT_EQ(ids1, ids_out);
// Write {2, 4}.
std::set<int64> ids2;
ids2.insert(2);
ids2.insert(4);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WriteUncommittedResourceIds(ids2));
ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetUncommittedResourceIds(&ids_out));
std::set<int64> expected = base::STLSetUnion<std::set<int64> >(ids1, ids2);
EXPECT_EQ(expected, ids_out);
// Delete {2, 3}.
std::set<int64> ids3;
ids3.insert(2);
ids3.insert(3);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ClearUncommittedResourceIds(ids3));
ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetUncommittedResourceIds(&ids_out));
expected = base::STLSetDifference<std::set<int64> >(expected, ids3);
EXPECT_EQ(expected, ids_out);
}
TEST(ServiceWorkerDatabaseTest, PurgeableResourceIds) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
// Write {1, 2, 3}.
std::set<int64> ids1;
ids1.insert(1);
ids1.insert(2);
ids1.insert(3);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WritePurgeableResourceIds(ids1));
std::set<int64> ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&ids_out));
EXPECT_EQ(ids1, ids_out);
// Write {2, 4}.
std::set<int64> ids2;
ids2.insert(2);
ids2.insert(4);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->WritePurgeableResourceIds(ids2));
ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&ids_out));
std::set<int64> expected = base::STLSetUnion<std::set<int64> >(ids1, ids2);
EXPECT_EQ(expected, ids_out);
// Delete {2, 3}.
std::set<int64> ids3;
ids3.insert(2);
ids3.insert(3);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ClearPurgeableResourceIds(ids3));
ids_out.clear();
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&ids_out));
expected = base::STLSetDifference<std::set<int64> >(expected, ids3);
EXPECT_EQ(expected, ids_out);
}
TEST(ServiceWorkerDatabaseTest, DeleteAllDataForOrigin) {
scoped_ptr<ServiceWorkerDatabase> database(CreateDatabaseInMemory());
ServiceWorkerDatabase::RegistrationData deleted_version;
std::vector<int64> newly_purgeable_resources;
// Data associated with |origin1| will be removed.
GURL origin1("http://example.com");
GURL origin2("http://example.org");
// |origin1| has two registrations (registration1 and registration2).
RegistrationData data1;
data1.registration_id = 10;
data1.scope = URL(origin1, "/foo");
data1.script = URL(origin1, "/script1.js");
data1.version_id = 100;
data1.resources_total_size_bytes = 2013 + 512;
std::vector<Resource> resources1;
resources1.push_back(CreateResource(1, URL(origin1, "/resource1"), 2013));
resources1.push_back(CreateResource(2, URL(origin1, "/resource2"), 512));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data1, resources1, &deleted_version, &newly_purgeable_resources));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data1.registration_id, origin1, "key1", "data1"));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data1.registration_id, origin1, "key2", "data2"));
RegistrationData data2;
data2.registration_id = 11;
data2.scope = URL(origin1, "/bar");
data2.script = URL(origin1, "/script2.js");
data2.version_id = 101;
data2.resources_total_size_bytes = 4 + 5;
std::vector<Resource> resources2;
resources2.push_back(CreateResource(3, URL(origin1, "/resource3"), 4));
resources2.push_back(CreateResource(4, URL(origin1, "/resource4"), 5));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data2, resources2, &deleted_version, &newly_purgeable_resources));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data2.registration_id, origin1, "key3", "data3"));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data2.registration_id, origin1, "key4", "data4"));
// |origin2| has one registration (registration3).
RegistrationData data3;
data3.registration_id = 12;
data3.scope = URL(origin2, "/hoge");
data3.script = URL(origin2, "/script3.js");
data3.version_id = 102;
data3.resources_total_size_bytes = 6 + 7;
std::vector<Resource> resources3;
resources3.push_back(CreateResource(5, URL(origin2, "/resource5"), 6));
resources3.push_back(CreateResource(6, URL(origin2, "/resource6"), 7));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteRegistration(
data3, resources3, &deleted_version, &newly_purgeable_resources));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data3.registration_id, origin2, "key5", "data5"));
ASSERT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->WriteUserData(
data3.registration_id, origin2, "key6", "data6"));
std::set<GURL> origins_to_delete;
origins_to_delete.insert(origin1);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->DeleteAllDataForOrigins(origins_to_delete,
&newly_purgeable_resources));
// |origin1| should be removed from the unique origin list.
std::set<GURL> unique_origins;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetOriginsWithRegistrations(&unique_origins));
EXPECT_EQ(1u, unique_origins.size());
EXPECT_TRUE(ContainsKey(unique_origins, origin2));
// The registrations for |origin1| should be removed.
std::vector<RegistrationData> registrations;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetRegistrationsForOrigin(origin1, ®istrations));
EXPECT_TRUE(registrations.empty());
GURL origin_out;
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadRegistrationOrigin(data1.registration_id, &origin_out));
// The registration for |origin2| should not be removed.
RegistrationData data_out;
std::vector<Resource> resources_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->ReadRegistration(
data3.registration_id, origin2, &data_out, &resources_out));
VerifyRegistrationData(data3, data_out);
VerifyResourceRecords(resources3, resources_out);
EXPECT_EQ(
ServiceWorkerDatabase::STATUS_OK,
database->ReadRegistrationOrigin(data3.registration_id, &origin_out));
EXPECT_EQ(origin2, origin_out);
// The resources associated with |origin1| should be purgeable.
std::set<int64> purgeable_ids_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->GetPurgeableResourceIds(&purgeable_ids_out));
EXPECT_EQ(4u, purgeable_ids_out.size());
EXPECT_TRUE(ContainsKey(purgeable_ids_out, 1));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, 2));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, 3));
EXPECT_TRUE(ContainsKey(purgeable_ids_out, 4));
// The user data associated with |origin1| should be removed.
std::string user_data_out;
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data1.registration_id, "key1", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data1.registration_id, "key2", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data2.registration_id, "key3", &user_data_out));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_ERROR_NOT_FOUND,
database->ReadUserData(
data2.registration_id, "key4", &user_data_out));
// The user data associated with |origin2| should not be removed.
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data3.registration_id, "key5", &user_data_out));
EXPECT_EQ("data5", user_data_out);
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK,
database->ReadUserData(
data3.registration_id, "key6", &user_data_out));
EXPECT_EQ("data6", user_data_out);
}
TEST(ServiceWorkerDatabaseTest, DestroyDatabase) {
base::ScopedTempDir database_dir;
ASSERT_TRUE(database_dir.CreateUniqueTempDir());
scoped_ptr<ServiceWorkerDatabase> database(
CreateDatabase(database_dir.path()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->LazyOpen(true));
ASSERT_TRUE(base::DirectoryExists(database_dir.path()));
EXPECT_EQ(ServiceWorkerDatabase::STATUS_OK, database->DestroyDatabase());
ASSERT_FALSE(base::DirectoryExists(database_dir.path()));
}
} // namespace content<|fim▁end|> | &newly_purgeable_resources));
|
<|file_name|>posvelacc_command_interface_test.cpp<|end_file_name|><|fim▁begin|>///////////////////////////////////////////////////////////////////////////////
// Copyright (C) 2013, PAL Robotics S.L.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
// * Neither the name of hiDOF, Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//////////////////////////////////////////////////////////////////////////////
/// \author Adolfo Rodriguez Tsouroukdissian
#include <string>
#include <gtest/gtest.h>
#include <ros/console.h>
#include <hardware_interface/posvelacc_command_interface.h>
using std::string;
using namespace hardware_interface;
TEST(PosVelAccCommandHandleTest, HandleConstruction)
{
string name = "name1";
double pos, vel, eff;
double cmd_pos, cmd_vel, cmd_acc;
EXPECT_NO_THROW(PosVelAccJointHandle tmp(JointStateHandle(name, &pos, &vel, &eff), &cmd_pos, &cmd_vel, &cmd_acc));
EXPECT_THROW(PosVelAccJointHandle tmp(JointStateHandle(name, &pos, &vel, &eff), nullptr, &cmd_vel, &cmd_acc), HardwareInterfaceException);
EXPECT_THROW(PosVelAccJointHandle tmp(JointStateHandle(name, &pos, &vel, &eff), &cmd_pos, nullptr, &cmd_acc), HardwareInterfaceException);
EXPECT_THROW(PosVelAccJointHandle tmp(JointStateHandle(name, &pos, &vel, &eff), &cmd_pos, &cmd_vel, nullptr), HardwareInterfaceException);
// Print error messages
// Requires manual output inspection, but exception message should be descriptive
try {PosVelAccJointHandle tmp(JointStateHandle(name, &pos, &vel, &eff), nullptr, nullptr, nullptr);}
catch(const HardwareInterfaceException& e) {ROS_ERROR_STREAM(e.what());}
}
#ifndef NDEBUG // NOTE: This test validates assertion triggering, hence only gets compiled in debug mode
TEST(JointStateHandleTest, AssertionTriggering)
{
PosVelAccJointHandle h;
// Data with invalid pointers should trigger an assertion
EXPECT_DEATH(h.getPosition(), ".*");
EXPECT_DEATH(h.getVelocity(), ".*");
EXPECT_DEATH(h.getEffort(), ".*");
EXPECT_DEATH(h.getCommandPosition(), ".*");
EXPECT_DEATH(h.getCommandVelocity(), ".*");
EXPECT_DEATH(h.getCommandAcceleration(), ".*");
EXPECT_DEATH(h.setCommandPosition(2.0), ".*");
EXPECT_DEATH(h.setCommandVelocity(3.0), ".*");
EXPECT_DEATH(h.setCommandAcceleration(4.0), ".*");
EXPECT_DEATH(h.setCommand(1.0, 2.0, 3.0), ".*");
}
#endif // NDEBUG
class PosVelAccCommandInterfaceTest : public ::testing::Test
{
protected:
double pos1 = {1.0}, vel1 = {2.0}, eff1 = {3.0}, cmd_pos1 = {0.0}, cmd_vel1 = {0.0}, cmd_acc1 = {0.0};
double pos2 = {4.0}, vel2 = {5.0}, eff2 = {6.0}, cmd_pos2 = {0.0}, cmd_vel2 = {0.0}, cmd_acc2 = {0.0};<|fim▁hole|> PosVelAccJointHandle hc1 = {hs1, &cmd_pos1, &cmd_vel1, &cmd_acc1};
PosVelAccJointHandle hc2 = {hs2, &cmd_pos2, &cmd_vel2, &cmd_acc2};
};
TEST_F(PosVelAccCommandInterfaceTest, ExcerciseApi)
{
PosVelAccJointInterface iface;
iface.registerHandle(hc1);
iface.registerHandle(hc2);
// Get handles
EXPECT_NO_THROW(iface.getHandle(name1));
EXPECT_NO_THROW(iface.getHandle(name2));
PosVelAccJointHandle hc1_tmp = iface.getHandle(name1);
EXPECT_EQ(name1, hc1_tmp.getName());
EXPECT_DOUBLE_EQ(pos1, hc1_tmp.getPosition());
EXPECT_DOUBLE_EQ(vel1, hc1_tmp.getVelocity());
EXPECT_DOUBLE_EQ(eff1, hc1_tmp.getEffort());
EXPECT_DOUBLE_EQ(cmd_pos1, hc1_tmp.getCommandPosition());
EXPECT_DOUBLE_EQ(cmd_vel1, hc1_tmp.getCommandVelocity());
EXPECT_DOUBLE_EQ(cmd_acc1, hc1_tmp.getCommandAcceleration());
const double new_cmd_pos1 = -1.0, new_cmd_vel1 = -2.0, new_cmd_acc1 = -3.0;
hc1_tmp.setCommand(new_cmd_pos1, new_cmd_vel1, new_cmd_acc1);
EXPECT_DOUBLE_EQ(new_cmd_pos1, hc1_tmp.getCommandPosition());
EXPECT_DOUBLE_EQ(new_cmd_vel1, hc1_tmp.getCommandVelocity());
EXPECT_DOUBLE_EQ(new_cmd_acc1, hc1_tmp.getCommandAcceleration());
PosVelAccJointHandle hc2_tmp = iface.getHandle(name2);
EXPECT_EQ(name2, hc2_tmp.getName());
EXPECT_DOUBLE_EQ(pos2, hc2_tmp.getPosition());
EXPECT_DOUBLE_EQ(vel2, hc2_tmp.getVelocity());
EXPECT_DOUBLE_EQ(eff2, hc2_tmp.getEffort());
EXPECT_DOUBLE_EQ(cmd_pos2, hc2_tmp.getCommandPosition());
EXPECT_DOUBLE_EQ(cmd_vel2, hc2_tmp.getCommandVelocity());
EXPECT_DOUBLE_EQ(cmd_acc2, hc2_tmp.getCommandAcceleration());
const double new_cmd_pos2 = -1.0, new_cmd_vel2 = -2.0, new_cmd_acc2 = -3.0;
hc2_tmp.setCommand(new_cmd_pos2, new_cmd_vel2, new_cmd_acc2);
EXPECT_DOUBLE_EQ(new_cmd_pos2, hc2_tmp.getCommandPosition());
EXPECT_DOUBLE_EQ(new_cmd_vel2, hc2_tmp.getCommandVelocity());
EXPECT_DOUBLE_EQ(new_cmd_acc2, hc2_tmp.getCommandAcceleration());
// This interface claims resources
EXPECT_EQ(2, iface.getClaims().size());
// Print error message
// Requires manual output inspection, but exception message should contain the interface name (not its base clase)
try {iface.getHandle("unknown_name");}
catch(const HardwareInterfaceException& e) {ROS_ERROR_STREAM(e.what());}
}
int main(int argc, char** argv)
{
testing::InitGoogleTest(&argc, argv);
return RUN_ALL_TESTS();
}<|fim▁end|> | string name1 = {"name_1"};
string name2 = {"name_2"};
JointStateHandle hs1 = {name1, &pos1, &vel1, &eff1};
JointStateHandle hs2 = {name2, &pos2, &vel2, &eff2}; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub(crate) mod collections;
pub(crate) mod num;
pub(crate) mod std_unstable;
use serde_derive::{Deserialize, Serialize};
use std::io::{self, Read};
pub(crate) fn string_from_read(mut read: impl Read, capacity: usize) -> io::Result<String> {
let mut buf = String::with_capacity(capacity);<|fim▁hole|>
#[cfg_attr(test, derive(PartialEq))]
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub(crate) enum ScalarOrArray<T> {
Scalar(T),
Array(Vec<T>),
}<|fim▁end|> | read.read_to_string(&mut buf)?;
Ok(buf)
} |
<|file_name|>counter.rs<|end_file_name|><|fim▁begin|>use sfml::graphics::*;
use sfml::system::{Vector2f, Vector2u};
use resize_handler::ResizeHandler;
use super::element::*;
const OFFSET: Vector2f = Vector2f { x: 32., y: 32. };
pub struct Counter<'s> {
icon: Sprite<'s>,
text: Text<'s>,
rel_pos: Vector2f,
}
impl<'s> Counter<'s> {
pub fn new(tex: &'s TextureRef, font: &'s Font, rel_pos: &Vector2f) -> Counter<'s> {
Counter {
icon: Sprite::with_texture(tex),
text: Text::new_init(" 0", font, 32),
rel_pos: *rel_pos,
}
}
pub fn set_value(&mut self, value: u8) {
let string = if value <= 9 {
format!(" {}", value)
} else if value <= 99 {
format!(" {}", value)
} else {
format!("{}", value)
};
self.text.set_string(&string);<|fim▁hole|> win_height as f32 * self.rel_pos.y);
let bounds = self.icon.global_bounds();
self.text.set_position2f(bounds.left, bounds.top);
self.text.move_(&OFFSET);
}
}
impl<'s> ResizeHandler for Counter<'s> {
fn on_resize(&mut self, width: u32, height: u32) {
self.recalculate(width, height);
}
}
impl<'s> UiDrawable for Counter<'s> {
fn draw(&self, target: &mut RenderTarget) {
target.draw(&self.icon);
target.draw(&self.text);
}
}
impl<'s> Element for Counter<'s> {
fn set_position_relative(&mut self, pos: &Vector2f, win_size: &Vector2u) {
self.rel_pos = *pos;
self.recalculate(win_size.x, win_size.y);
}
}<|fim▁end|> | }
fn recalculate(&mut self, win_width: u32, win_height: u32) {
self.icon.set_position2f(win_width as f32 * self.rel_pos.x, |
<|file_name|>bin_test.js<|end_file_name|><|fim▁begin|>/**
* Test for fur bin.
* Runs with mocha.
*/
'use strict'
const assert = require('assert')
const fs = require('fs')
const furBin = require.resolve('../bin/fur')
const execcli = require('execcli')
const mkdirp = require('mkdirp')
let tmpDir = __dirname + '/../tmp'
describe('bin', function () {
this.timeout(24000)
before(async () => {
await mkdirp(tmpDir)
})
after(async () => {
})
it('Generate favicon', async () => {
let filename = tmpDir + '/testing-bin-favicon.png'
await execcli(furBin, [ 'favicon', filename ])
assert.ok(fs.existsSync(filename))
})
it('Generate banner', async () => {<|fim▁hole|>})
/* global describe, before, after, it */<|fim▁end|> | let filename = tmpDir + '/testing-bin-banner.png'
await execcli(furBin, [ 'banner', filename ])
assert.ok(fs.existsSync(filename))
}) |
<|file_name|>test_large_ops.py<|end_file_name|><|fim▁begin|># Copyright 2013 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest_lib import exceptions as lib_exc
from tempest.common import fixed_network
from tempest.common.utils import data_utils
from tempest.common import waiters<|fim▁hole|>from tempest.scenario import manager
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestLargeOpsScenario(manager.ScenarioTest):
"""
Test large operations.
This test below:
* Spin up multiple instances in one nova call, and repeat three times
* as a regular user
* TODO: same thing for cinder
"""
@classmethod
def skip_checks(cls):
super(TestLargeOpsScenario, cls).skip_checks()
if CONF.scenario.large_ops_number < 1:
raise cls.skipException("large_ops_number not set to multiple "
"instances")
@classmethod
def setup_credentials(cls):
cls.set_network_resources()
super(TestLargeOpsScenario, cls).setup_credentials()
@classmethod
def resource_setup(cls):
super(TestLargeOpsScenario, cls).resource_setup()
# list of cleanup calls to be executed in reverse order
cls._cleanup_resources = []
@classmethod
def resource_cleanup(cls):
while cls._cleanup_resources:
function, args, kwargs = cls._cleanup_resources.pop(-1)
try:
function(*args, **kwargs)
except lib_exc.NotFound:
pass
super(TestLargeOpsScenario, cls).resource_cleanup()
@classmethod
def addCleanupClass(cls, function, *arguments, **keywordArguments):
cls._cleanup_resources.append((function, arguments, keywordArguments))
def _wait_for_server_status(self, status):
for server in self.servers:
# Make sure nova list keeps working throughout the build process
self.servers_client.list_servers()
waiters.wait_for_server_status(self.servers_client,
server['id'], status)
def nova_boot(self):
name = data_utils.rand_name('scenario-server')
flavor_id = CONF.compute.flavor_ref
# Explicitly create secgroup to avoid cleanup at the end of testcases.
# Since no traffic is tested, we don't need to actually add rules to
# secgroup
secgroup = self.security_groups_client.create_security_group(
name='secgroup-%s' % name, description='secgroup-desc-%s' % name)
self.addCleanupClass(self.security_groups_client.delete_security_group,
secgroup['id'])
create_kwargs = {
'min_count': CONF.scenario.large_ops_number,
'security_groups': [{'name': secgroup['name']}]
}
network = self.get_tenant_network()
create_kwargs = fixed_network.set_networks_kwarg(network,
create_kwargs)
#self.servers_client.create_server(
self.create_server(
name,
'',
flavor_id,
**create_kwargs)
# needed because of bug 1199788
params = {'name': name}
server_list = self.servers_client.list_servers(**params)
self.servers = server_list['servers']
for server in self.servers:
# after deleting all servers - wait for all servers to clear
# before cleanup continues
self.addCleanupClass(self.servers_client.
wait_for_server_termination,
server['id'])
for server in self.servers:
self.addCleanupClass(self.servers_client.delete_server,
server['id'])
self._wait_for_server_status('ACTIVE')
def _large_ops_scenario(self):
#self.glance_image_create()
self.nova_boot()
@test.idempotent_id('14ba0e78-2ed9-4d17-9659-a48f4756ecb3')
@test.services('compute', 'image')
def test_large_ops_scenario_1(self):
self._large_ops_scenario()
@test.idempotent_id('b9b79b88-32aa-42db-8f8f-dcc8f4b4ccfe')
@test.services('compute', 'image')
def test_large_ops_scenario_2(self):
self._large_ops_scenario()
@test.idempotent_id('3aab7e82-2de3-419a-9da1-9f3a070668fb')
@test.services('compute', 'image')
def test_large_ops_scenario_3(self):
self._large_ops_scenario()<|fim▁end|> | from tempest import config |
<|file_name|>htmlanchorelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::activation::Activatable;
use dom::bindings::cell::DomRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods;
use dom::bindings::codegen::Bindings::HTMLAnchorElementBinding;
use dom::bindings::codegen::Bindings::HTMLAnchorElementBinding::HTMLAnchorElementMethods;
use dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::root::{DomRoot, MutNullableDom};
use dom::bindings::str::{DOMString, USVString};
use dom::document::Document;
use dom::domtokenlist::DOMTokenList;
use dom::element::Element;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::htmlelement::HTMLElement;
use dom::htmlimageelement::HTMLImageElement;
use dom::mouseevent::MouseEvent;
use dom::node::{Node, document_from_node};
use dom::urlhelper::UrlHelper;
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use net_traits::ReferrerPolicy;
use num_traits::ToPrimitive;
use servo_url::ServoUrl;
use std::default::Default;
use style::attr::AttrValue;
#[dom_struct]
pub struct HTMLAnchorElement {
htmlelement: HTMLElement,
rel_list: MutNullableDom<DOMTokenList>,
url: DomRefCell<Option<ServoUrl>>,
}
impl HTMLAnchorElement {
fn new_inherited(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> HTMLAnchorElement {
HTMLAnchorElement {
htmlelement:
HTMLElement::new_inherited(local_name, prefix, document),
rel_list: Default::default(),
url: DomRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(local_name: LocalName,
prefix: Option<Prefix>,
document: &Document) -> DomRoot<HTMLAnchorElement> {
Node::reflect_node(Box::new(HTMLAnchorElement::new_inherited(local_name, prefix, document)),
document,
HTMLAnchorElementBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#concept-hyperlink-url-set
fn set_url(&self) {
let attribute = self.upcast::<Element>().get_attribute(&ns!(), &local_name!("href"));
*self.url.borrow_mut() = attribute.and_then(|attribute| {
let document = document_from_node(self);
document.base_url().join(&attribute.value()).ok()
});
}
// https://html.spec.whatwg.org/multipage/#reinitialise-url
fn reinitialize_url(&self) {
// Step 1.
match *self.url.borrow() {
Some(ref url) if url.scheme() == "blob" && url.cannot_be_a_base() => return,
_ => (),
}
// Step 2.
self.set_url();
}
// https://html.spec.whatwg.org/multipage/#update-href
fn update_href(&self, url: DOMString) {
self.upcast::<Element>().set_string_attribute(&local_name!("href"), url);
}
}
impl VirtualMethods for HTMLAnchorElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &LocalName, value: DOMString) -> AttrValue {
match name {
&local_name!("rel") => AttrValue::from_serialized_tokenlist(value.into()),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
impl HTMLAnchorElementMethods for HTMLAnchorElement {
// https://html.spec.whatwg.org/multipage/#dom-a-text
fn Text(&self) -> DOMString {
self.upcast::<Node>().GetTextContent().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-a-text
fn SetText(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value))
}
// https://html.spec.whatwg.org/multipage/#dom-a-rel
make_getter!(Rel, "rel");
// https://html.spec.whatwg.org/multipage/#dom-a-rel
fn SetRel(&self, rel: DOMString) {
self.upcast::<Element>().set_tokenlist_attribute(&local_name!("rel"), rel);
}
// https://html.spec.whatwg.org/multipage/#dom-a-rellist
fn RelList(&self) -> DomRoot<DOMTokenList> {
self.rel_list.or_init(|| DOMTokenList::new(self.upcast(), &local_name!("rel")))
}
// https://html.spec.whatwg.org/multipage/#dom-a-coords
make_getter!(Coords, "coords");
// https://html.spec.whatwg.org/multipage/#dom-a-coords
make_setter!(SetCoords, "coords");
// https://html.spec.whatwg.org/multipage/#dom-a-name
make_getter!(Name, "name");
// https://html.spec.whatwg.org/multipage/#dom-a-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-a-rev
make_getter!(Rev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-a-rev
make_setter!(SetRev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-a-shape
make_getter!(Shape, "shape");
// https://html.spec.whatwg.org/multipage/#dom-a-shape
make_setter!(SetShape, "shape");
// https://html.spec.whatwg.org/multipage/#attr-hyperlink-target
make_getter!(Target, "target");
// https://html.spec.whatwg.org/multipage/#attr-hyperlink-target
make_setter!(SetTarget, "target");
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hash
fn Hash(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
// Steps 3-4.
UrlHelper::Hash(url)
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hash
fn SetHash(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.scheme() == "javascript" => return,
None => return,
// Steps 4-5.
Some(url) => {
UrlHelper::SetHash(url, value);
DOMString::from(url.as_str())
}
};
// Step 6.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-host
fn Host(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
if url.host().is_none() {
USVString(String::new())
} else {
// Steps 4-5.
UrlHelper::Host(url)
}
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-host
fn SetHost(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.cannot_be_a_base() => return,
None => return,
// Step 4.
Some(url) => {
UrlHelper::SetHost(url, value);
DOMString::from(url.as_str())
}
};
// Step 5.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hostname
fn Hostname(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
// Step 4.
UrlHelper::Hostname(url)
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hostname
fn SetHostname(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.cannot_be_a_base() => return,
None => return,
// Step 4.
Some(url) => {
UrlHelper::SetHostname(url, value);
DOMString::from(url.as_str())
}
};
// Step 5.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn Href(&self) -> USVString {
// Step 1.
self.reinitialize_url();
USVString(match *self.url.borrow() {
None => {
match self.upcast::<Element>().get_attribute(&ns!(), &local_name!("href")) {
// Step 3.
None => String::new(),
// Step 4.
Some(attribute) => (**attribute.value()).to_owned(),
}
},
// Step 5.
Some(ref url) => url.as_str().to_owned(),
})
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn SetHref(&self, value: USVString) {
self.upcast::<Element>().set_string_attribute(&local_name!("href"),
DOMString::from_string(value.0));
self.set_url();
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-origin
fn Origin(&self) -> USVString {
// Step 1.
self.reinitialize_url();
USVString(match *self.url.borrow() {
None => {
// Step 2.
"".to_owned()
},
Some(ref url) => {
// Step 3.
url.origin().unicode_serialization()
},
})
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-password
fn Password(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Steps 3-4.
Some(ref url) => UrlHelper::Password(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-password
fn SetPassword(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.host().is_none() || url.cannot_be_a_base() => return,
None => return,
// Step 4.
Some(url) => {
UrlHelper::SetPassword(url, value);
DOMString::from(url.as_str())
}
};
// Step 5.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-pathname
fn Pathname(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Steps 4-5.
Some(ref url) => UrlHelper::Pathname(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-pathname
fn SetPathname(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.cannot_be_a_base() => return,
None => return,
// Step 5.
Some(url) => {
UrlHelper::SetPathname(url, value);
DOMString::from(url.as_str())
}
};
// Step 6.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-port
fn Port(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Step 4.
Some(ref url) => UrlHelper::Port(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-port<|fim▁hole|> // Step 3.
let url = match self.url.borrow_mut().as_mut() {
Some(ref url) if url.host().is_none() ||
url.cannot_be_a_base() ||
url.scheme() == "file" => return,
None => return,
// Step 4.
Some(url) => {
UrlHelper::SetPort(url, value);
DOMString::from(url.as_str())
}
};
// Step 5.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-protocol
fn Protocol(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(":".to_owned()),
// Step 3.
Some(ref url) => UrlHelper::Protocol(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-protocol
fn SetProtocol(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
let url = match self.url.borrow_mut().as_mut() {
// Step 2.
None => return,
// Step 3.
Some(url) => {
UrlHelper::SetProtocol(url, value);
DOMString::from(url.as_str())
}
};
// Step 4.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-search
fn Search(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(String::new()),
// Step 3.
Some(ref url) => UrlHelper::Search(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-search
fn SetSearch(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
None => return,
// Steps 4-5.
// TODO add this element's node document character encoding as
// encoding override (as described in the spec)
Some(url) => {
UrlHelper::SetSearch(url, value);
DOMString::from(url.as_str())
}
};
// Step 6.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-username
fn Username(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(String::new()),
// Step 3.
Some(ref url) => UrlHelper::Username(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-username
fn SetUsername(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
let url = match self.url.borrow_mut().as_mut() {
// Step 3.
Some(ref url) if url.host().is_none() || url.cannot_be_a_base() => return,
None => return,
// Step 4.
Some(url) => {
UrlHelper::SetUsername(url, value);
DOMString::from(url.as_str())
}
};
// Step 5.
self.update_href(url);
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn Stringifier(&self) -> DOMString {
DOMString::from(self.Href().0)
}
}
impl Activatable for HTMLAnchorElement {
fn as_element(&self) -> &Element {
self.upcast::<Element>()
}
fn is_instance_activatable(&self) -> bool {
// https://html.spec.whatwg.org/multipage/#hyperlink
// "a [...] element[s] with an href attribute [...] must [..] create a
// hyperlink"
// https://html.spec.whatwg.org/multipage/#the-a-element
// "The activation behaviour of a elements *that create hyperlinks*"
self.upcast::<Element>().has_attribute(&local_name!("href"))
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
fn pre_click_activation(&self) {
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
// https://html.spec.whatwg.org/multipage/#run-canceled-activation-steps
fn canceled_activation(&self) {
}
//https://html.spec.whatwg.org/multipage/#the-a-element:activation-behaviour
fn activation_behavior(&self, event: &Event, target: &EventTarget) {
//Step 1. If the node document is not fully active, abort.
let doc = document_from_node(self);
if !doc.is_fully_active() {
return;
}
//TODO: Step 2. Check if browsing context is specified and act accordingly.
//Step 3. Handle <img ismap/>.
let element = self.upcast::<Element>();
let mouse_event = event.downcast::<MouseEvent>().unwrap();
let mut ismap_suffix = None;
if let Some(element) = target.downcast::<Element>() {
if target.is::<HTMLImageElement>() && element.has_attribute(&local_name!("ismap")) {
let target_node = element.upcast::<Node>();
let rect = target_node.bounding_content_box_or_zero();
ismap_suffix = Some(
format!("?{},{}", mouse_event.ClientX().to_f32().unwrap() - rect.origin.x.to_f32_px(),
mouse_event.ClientY().to_f32().unwrap() - rect.origin.y.to_f32_px())
)
}
}
// Step 4.
//TODO: Download the link is `download` attribute is set.
// https://w3c.github.io/webappsec-referrer-policy/#referrer-policy-delivery
let referrer_policy = match self.RelList().Contains("noreferrer".into()) {
true => Some(ReferrerPolicy::NoReferrer),
false => None,
};
follow_hyperlink(element, ismap_suffix, referrer_policy);
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
fn implicit_submission(&self, _ctrl_key: bool, _shift_key: bool, _alt_key: bool, _meta_key: bool) {
}
}
/// <https://html.spec.whatwg.org/multipage/#the-rules-for-choosing-a-browsing-context-given-a-browsing-context-name>
fn is_current_browsing_context(target: DOMString) -> bool {
target.is_empty() || target == "_self"
}
/// <https://html.spec.whatwg.org/multipage/#following-hyperlinks-2>
pub fn follow_hyperlink(subject: &Element, hyperlink_suffix: Option<String>, referrer_policy: Option<ReferrerPolicy>) {
// Step 1: replace.
// Step 2: source browsing context.
// Step 3: target browsing context.
let target = subject.get_attribute(&ns!(), &local_name!("target"));
// Step 4: disown target's opener if needed.
let attribute = subject.get_attribute(&ns!(), &local_name!("href")).unwrap();
let mut href = attribute.Value();
// Step 7: append a hyperlink suffix.
// https://www.w3.org/Bugs/Public/show_bug.cgi?id=28925
if let Some(suffix) = hyperlink_suffix {
href.push_str(&suffix);
}
// Step 5: parse the URL.
// Step 6: navigate to an error document if parsing failed.
let document = document_from_node(subject);
let url = match document.url().join(&href) {
Ok(url) => url,
Err(_) => return,
};
// Step 8: navigate to the URL.
if let Some(target) = target {
if !is_current_browsing_context(target.Value()) {
// https://github.com/servo/servo/issues/13241
}
}
debug!("following hyperlink to {}", url);
let window = document.window();
window.load_url(url, false, false, referrer_policy);
}<|fim▁end|> | fn SetPort(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
|
<|file_name|>calTimer.QThread.py<|end_file_name|><|fim▁begin|>#rewrite of original calTimer to use qthreads as opposed to native python threads
#needed to make UI changes (impossible from native)
#also attempting to alleviate need for sigterm to stop perm loop
from PyQt4 import QtCore
import time,os,ctypes
import sys
class calTimer(QtCore.QThread):
xml_file = './data/data.xml'
fileSize = os.stat(xml_file)
def initFileSize(self):
print "initfilesize run"
fileToCheck = os.stat(self.xml_file)
self.fileSize = fileToCheck.st_size
<|fim▁hole|> def run(self):
self.initFileSize()
testFileSize = self.fileSize
while testFileSize == self.fileSize:
print "No change - sleep 3"
#time.sleep(3)<|fim▁end|> | |
<|file_name|>event_listening.py<|end_file_name|><|fim▁begin|>import curses
import functools
from stem.control import EventType, Controller
from stem.util import str_tools
# colors that curses can handle
COLOR_LIST = {
"red": curses.COLOR_RED,
"green": curses.COLOR_GREEN,
"yellow": curses.COLOR_YELLOW,
"blue": curses.COLOR_BLUE,
"cyan": curses.COLOR_CYAN,
"magenta": curses.COLOR_MAGENTA,
"black": curses.COLOR_BLACK,
"white": curses.COLOR_WHITE,
}
GRAPH_WIDTH = 40
GRAPH_HEIGHT = 8
DOWNLOAD_COLOR = "green"
UPLOAD_COLOR = "blue"
def main():
with Controller.from_port(port = 9051) as controller:
controller.authenticate()
try:
# This makes curses initialize and call draw_bandwidth_graph() with a
# reference to the screen, followed by additional arguments (in this
# case just the controller).
curses.wrapper(draw_bandwidth_graph, controller)
except KeyboardInterrupt:
pass # the user hit ctrl+c
def draw_bandwidth_graph(stdscr, controller):
window = Window(stdscr)
# (downloaded, uploaded) tuples for the last 40 seconds
bandwidth_rates = [(0, 0)] * GRAPH_WIDTH
# Making a partial that wraps the window and bandwidth_rates with a function
# for Tor to call when it gets a BW event. This causes the 'window' and
# 'bandwidth_rates' to be provided as the first two arguments whenever
# 'bw_event_handler()' is called.
bw_event_handler = functools.partial(_handle_bandwidth_event, window, bandwidth_rates)
# Registering this listener with Tor. Tor reports a BW event each second.
controller.add_event_listener(bw_event_handler, EventType.BW)
# Pause the main thread until the user hits any key... and no, don't you dare
# ask where the 'any' key is. :P
stdscr.getch()
def _handle_bandwidth_event(window, bandwidth_rates, event):
# callback for when tor provides us with a BW event
bandwidth_rates.insert(0, (event.read, event.written))
bandwidth_rates = bandwidth_rates[:GRAPH_WIDTH] # truncate old values
_render_graph(window, bandwidth_rates)
def _render_graph(window, bandwidth_rates):
window.erase()
download_rates = [entry[0] for entry in bandwidth_rates]<|fim▁hole|> # show the latest values at the top
label = "Downloaded (%s/s):" % str_tools.size_label(download_rates[0], 1)
window.addstr(0, 1, label, DOWNLOAD_COLOR, curses.A_BOLD)
label = "Uploaded (%s/s):" % str_tools.size_label(upload_rates[0], 1)
window.addstr(0, GRAPH_WIDTH + 7, label, UPLOAD_COLOR, curses.A_BOLD)
# draw the graph bounds in KB
max_download_rate = max(download_rates)
max_upload_rate = max(upload_rates)
window.addstr(1, 1, "%4i" % (max_download_rate / 1024), DOWNLOAD_COLOR)
window.addstr(GRAPH_HEIGHT, 1, " 0", DOWNLOAD_COLOR)
window.addstr(1, GRAPH_WIDTH + 7, "%4i" % (max_upload_rate / 1024), UPLOAD_COLOR)
window.addstr(GRAPH_HEIGHT, GRAPH_WIDTH + 7, " 0", UPLOAD_COLOR)
# draw the graph
for col in range(GRAPH_WIDTH):
col_height = GRAPH_HEIGHT * download_rates[col] / max(max_download_rate, 1)
for row in range(col_height):
window.addstr(GRAPH_HEIGHT - row, col + 6, " ", DOWNLOAD_COLOR, curses.A_STANDOUT)
col_height = GRAPH_HEIGHT * upload_rates[col] / max(max_upload_rate, 1)
for row in range(col_height):
window.addstr(GRAPH_HEIGHT - row, col + GRAPH_WIDTH + 12, " ", UPLOAD_COLOR, curses.A_STANDOUT)
window.refresh()
class Window(object):
"""
Simple wrapper for the curses standard screen object.
"""
def __init__(self, stdscr):
self._stdscr = stdscr
# Mappings of names to the curses color attribute. Initially these all
# reference black text, but if the terminal can handle color then
# they're set with that foreground color.
self._colors = dict([(color, 0) for color in COLOR_LIST])
# allows for background transparency
try:
curses.use_default_colors()
except curses.error:
pass
# makes the cursor invisible
try:
curses.curs_set(0)
except curses.error:
pass
# initializes colors if the terminal can handle them
try:
if curses.has_colors():
color_pair = 1
for name, foreground in COLOR_LIST.items():
background = -1 # allows for default (possibly transparent) background
curses.init_pair(color_pair, foreground, background)
self._colors[name] = curses.color_pair(color_pair)
color_pair += 1
except curses.error:
pass
def addstr(self, y, x, msg, color = None, attr = curses.A_NORMAL):
# Curses throws an error if we try to draw a message that spans out of the
# window's bounds (... seriously?), so doing our best to avoid that.
if color is not None:
if color not in self._colors:
recognized_colors = ", ".join(self._colors.keys())
raise ValueError("The '%s' color isn't recognized: %s" % (color, recognized_colors))
attr |= self._colors[color]
max_y, max_x = self._stdscr.getmaxyx()
if max_x > x and max_y > y:
try:
self._stdscr.addstr(y, x, msg[:max_x - x], attr)
except:
pass # maybe an edge case while resizing the window
def erase(self):
self._stdscr.erase()
def refresh(self):
self._stdscr.refresh()
if __name__ == '__main__':
main()<|fim▁end|> | upload_rates = [entry[1] for entry in bandwidth_rates]
|
<|file_name|>DataSetObservable.java<|end_file_name|><|fim▁begin|>package com.mricefox.androidhorizontalcalendar.library.calendar;
import android.database.Observable;
/**
* Author:zengzifeng email:[email protected]<|fim▁hole|> public boolean hasObservers() {
synchronized (mObservers) {
return !mObservers.isEmpty();
}
}
public void notifyChanged() {
synchronized (mObservers) {//mObservers register and notify maybe in different thread
for (int i = mObservers.size() - 1; i >= 0; i--) {
mObservers.get(i).onChanged();
}
}
}
public void notifyItemRangeChanged(long from, long to) {
synchronized (mObservers) {
for (int i = mObservers.size() - 1; i >= 0; i--) {
mObservers.get(i).onItemRangeChanged(from, to);
}
}
}
}<|fim▁end|> | * Description:
* Date:2015/12/25
*/
public class DataSetObservable extends Observable<DataSetObserver> { |
<|file_name|>test_SmartFrames.py<|end_file_name|><|fim▁begin|># Copyright 2015 Don Drake [email protected]
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
import unittest
import datetime
import time
from copy import deepcopy
src_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(src_dir, '..'))
from pyspark.sql.types import Row, StructType, StructField, IntegerType, StringType, BinaryType, BooleanType, DateType, TimestampType, DoubleType, FloatType, ByteType, LongType, ShortType
from SparkTestCase import SparkTestCase
from smartframes import SmartFrames
class SimpleTable(SmartFrames.SmartFrames):
schema = StructType( sorted(
[
StructField("pk_id", IntegerType()),
StructField("first_name", StringType()),
],
key = lambda x: x.name))
skipSelectedFields = []
class ComplexTable(SmartFrames.SmartFrames):
schema = StructType( sorted(
[
StructField("pk_id", IntegerType()),
StructField("string", StringType()),
StructField("binary", BinaryType()),
StructField("boolean", BooleanType()),
StructField("date", DateType()),
StructField("time", TimestampType()),
StructField("double1", DoubleType()),
StructField("double2", DoubleType()),
StructField("float1", FloatType()),
StructField("float2", FloatType()),
StructField("byte", ByteType()),
StructField("integer", IntegerType()),
StructField("along", LongType()),
StructField("short", ShortType()),
],
key = lambda x: x.name))
skipSelectedFields = []
class TestSmartFrames(SparkTestCase):
def testSimpleTable(self):
simpleTable = SimpleTable()
self.assertEquals(simpleTable.schema, SimpleTable().schema)
s1 = SimpleTable()
s1.pk_id = 1
s1.first_name = 'Don'
s2 = SimpleTable()
s2.pk_id = 2
s2.first_name = 'Dan'
df = self.sqlCtx.createDataFrame(self.sc.parallelize([s1.createRow(), s2.createRow()]), s1.schema)
self.assertEquals(2, df.count())
fileName = self.tempdir + '/simple.table'
df.saveAsParquetFile(fileName)
df2 = self.sqlCtx.parquetFile(fileName)
self.assertEquals(sorted(df.collect()), sorted(df2.collect()))
def testComplexTable(self):
complexTable = ComplexTable()
self.assertEquals(complexTable.schema, ComplexTable().schema)
s1 = ComplexTable()
s1.pk_id = 1
s1.string = 'abcdefghijklmnopqrstuvwxyz'
s1.binary = bytearray(b"0xDEADBEEF")
s1.boolean = True
s1.date = datetime.date(2015, 10, 3)
s1.time = datetime.datetime(2015, 10, 3, 14, 33)
s1.double1 = 1
s1.double2 = 2.2
s1.float1 = 1
s1.float2 = 2.2
s1.byte = 100
s1.integer = 10000
s1.along = 10000
s1.short = 10
df = self.sqlCtx.createDataFrame(self.sc.parallelize([s1.createRow()]), s1.schema)
fileName = self.tempdir + '/complex.table'
df.saveAsParquetFile(fileName)
df2 = self.sqlCtx.parquetFile(fileName)
self.assertEquals(sorted(df.collect()), sorted(df2.collect()))
r1 = df2.collect()[0]
print "r1=", r1
self.assertEquals(r1.pk_id, s1.pk_id)
self.assertEquals(r1.string, s1.string)
self.assertEquals(r1.binary, s1.binary)
self.assertEquals(r1.boolean, s1.boolean)
self.assertEquals(r1.date, s1.date)
self.assertEquals(r1.time, s1.time)
self.assertEquals(r1.double1, s1.double1)
self.assertEquals(r1.double2, s1.double2)
self.assertEquals(r1.float1, s1.float1)
# AssertionError: 2.200000047683716 != 2.2
#self.assertEquals(r1.float2, s1.float2)
self.assertEquals(r1.byte, s1.byte)
self.assertEquals(r1.integer, s1.integer)
self.assertEquals(r1.along, s1.along)
self.assertEquals(r1.short, s1.short)
def testComplexTableTiming(self):
s1 = ComplexTable()
s1.pk_id = 1
s1.string = 'abcdefghijklmnopqrstuvwxyz'
s1.binary = bytearray(b"0xDEADBEEF")
s1.boolean = True
s1.date = datetime.date(2015, 10, 3)
s1.time = datetime.datetime(2015, 10, 3, 14, 33)
s1.double1 = 1
s1.double2 = 2.2
s1.float1 = 1
s1.float2 = 2.2
s1.byte = 100
s1.integer = 10000
s1.along = 10000
s1.short = 10
numRows = 10000
rows = []
start = time.clock()
for n in xrange(numRows):
rows.append(deepcopy(s1.createRow()))
end = time.clock()
print "Duration for ", numRows, " is ", (end - start)
df = self.sqlCtx.createDataFrame(self.sc.parallelize(rows), s1.schema)
# duration for v.1.0.1 ~ 2.4 seconds for 10000 rows<|fim▁hole|>if __name__ == '__main__':
unittest.main()<|fim▁end|> | # duration for v.1.1.0 ~ 1.6 seconds for 10000 rows
count = df.count()
self.assertEquals(count, numRows)
|
<|file_name|>RESTPropertyCategoryQueryBuilderV1.java<|end_file_name|><|fim▁begin|>/*
Copyright 2011-2014 Red Hat, Inc
This file is part of PressGang CCMS.
PressGang CCMS is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PressGang CCMS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with PressGang CCMS. If not, see <http://www.gnu.org/licenses/>.
*/
package org.jboss.pressgang.ccms.rest.v1.query;
import java.util.ArrayList;
import java.util.List;
import org.jboss.pressgang.ccms.rest.v1.constants.CommonFilterConstants;
import org.jboss.pressgang.ccms.rest.v1.query.base.RESTBaseQueryBuilderV1;
import org.jboss.pressgang.ccms.utils.structures.Pair;
public class RESTPropertyCategoryQueryBuilderV1 extends RESTBaseQueryBuilderV1 {
private static List<Pair<String, String>> filterPairs = new ArrayList<Pair<String, String>>() {
private static final long serialVersionUID = -8638470044710698912L;
{
add(new Pair<String, String>(CommonFilterConstants.PROP_CATEGORY_IDS_FILTER_VAR,
CommonFilterConstants.PROP_CATEGORY_IDS_FILTER_VAR_DESC));
add(new Pair<String, String>(CommonFilterConstants.PROP_CATEGORY_NAME_FILTER_VAR,
CommonFilterConstants.PROP_CATEGORY_NAME_FILTER_VAR_DESC));
add(new Pair<String, String>(CommonFilterConstants.PROP_CATEGORY_DESCRIPTION_FILTER_VAR,
CommonFilterConstants.PROP_CATEGORY_DESCRIPTION_FILTER_VAR_DESC));
}
};
<|fim▁hole|> public static List<Pair<String, String>> getFilterInfo() {
return filterPairs;
}
public static String getFilterDesc(final String varName) {
if (varName == null) return null;
final List<Pair<String, String>> filterInfo = getFilterInfo();
for (final Pair<String, String> varInfo : filterInfo) {
if (varInfo.getFirst().equals(varName)) {
return varInfo.getSecond();
}
}
return null;
}
public List<Integer> getPropertyCategoryIds() {
final String propertyCategoryIdsString = get(CommonFilterConstants.PROP_CATEGORY_IDS_FILTER_VAR);
return getIntegerList(propertyCategoryIdsString);
}
public void setPropertyCategoryIds(final List<Integer> propertyCategoryIds) {
put(CommonFilterConstants.PROP_CATEGORY_IDS_FILTER_VAR, propertyCategoryIds);
}
public String getPropertyCategoryName() {
return get(CommonFilterConstants.PROP_CATEGORY_NAME_FILTER_VAR);
}
public void setPropertyCategoryName(final String propertyCategoryName) {
put(CommonFilterConstants.PROP_CATEGORY_NAME_FILTER_VAR, propertyCategoryName);
}
public String getPropertyCategoryDescription() {
return get(CommonFilterConstants.PROP_CATEGORY_DESCRIPTION_FILTER_VAR);
}
public void setPropertyCategoryDescription(final String propertyCategoryDescription) {
put(CommonFilterConstants.PROP_CATEGORY_DESCRIPTION_FILTER_VAR, propertyCategoryDescription);
}
}<|fim▁end|> | |
<|file_name|>log_paths_dict.py<|end_file_name|><|fim▁begin|>import os
import sys
import scandir
import pymmh3 as mmh3
import misc
def log_paths_dict(d, record_key = 'input', nest_depth = 1, sep = ':',
cl_args_list = sys.argv):
'''
Records contents of dictionary d at record_key on nest_depth.
Assumes unnested elements of d follow human-name: file-path.
Values of d at record_key can be string or (nested) dict.
'''
if misc.is_scons_dry_run(cl_args_list = cl_args_list):
return None
record_dict = misc.flatten_dict(d)
record_dict = [(key, val) for key, val in sorted(record_dict.items())
if key.count(sep) >= nest_depth and val not in [None, 'None', '']]
for name, path in record_dict:
if record_key == name.split(sep)[nest_depth]:
record_dir(path, name)
return None
def record_dir(inpath, name,
include_checksum = False,
file_limit = 5000,
outpath = 'state_of_input.log'):
'''
Record relative path, size, and (optionally) checksum of all files within inpath.
Relative paths are from inpath.
Append info in |-delimited format to outpath below a heading made from inpath.
'''
inpath, name, this_file_only, do_walk = check_inpath(inpath, name)
if do_walk:
files_info = walk(inpath, include_checksum, file_limit, this_file_only)
else:
files_info = None
check_outpath(outpath)
write_log(name, files_info, outpath)
return None
def check_inpath(inpath, name):
'''
Check that inpath exists as file or directory.
If file, make inpath the file's directory and only record info for that file.
'''
this_file_only = None
do_walk = True
if os.path.isfile(inpath):
this_file_only = inpath
inpath = os.path.dirname(inpath)
elif os.path.isdir(inpath):
pass
else:
name = name + ', could not find at runtime.'
do_walk = False
return inpath, name, this_file_only, do_walk
def check_outpath(outpath):
'''
Ensure that the directory for outpath exists.
'''
dirname = os.path.dirname(outpath)
if dirname and not os.path.isdir(dirname):
os.makedirs(dirname)
return None
def walk(inpath, include_checksum, file_limit, this_file_only):
'''<|fim▁hole|> Walk in same order as os.walk.
Keep walking until there are no more subdirs or there's info on file_limit files.
'''
dirs = [inpath]
files_info, file_limit = prep_files_info(include_checksum, file_limit)
while dirs and do_more_files(files_info, file_limit):
dirs, files_info = scan_dir_wrapper(
dirs, files_info, inpath, include_checksum, file_limit, this_file_only)
return files_info
def prep_files_info(include_checksum, file_limit):
'''
Create a header for the file characteristics to grab.
Adjusts file_limit for existence of header.
'''
files_info = [['file path', 'file size in bytes']]
if include_checksum:
files_info[0].append('MurmurHash3')
file_limit += 1
return files_info, file_limit
def do_more_files(files_info, file_limit):
'''
True if files_info has fewer then file_limit elements.
'''
return bool(len(files_info) < file_limit)
def scan_dir_wrapper(dirs, files_info, inpath, include_checksum, file_limit,
this_file_only):
'''
Drop down access and output management for scan_dir.
Keep running the while loop in walk as directories are removed and added.
'''
dir_to_scan = dirs.pop(0)
subdirs, files_info = scan_dir(
dir_to_scan, files_info, inpath, include_checksum, file_limit, this_file_only)
dirs += subdirs
return dirs, files_info
def scan_dir(dir_to_scan, files_info, inpath, include_checksum, file_limit,
this_file_only):
'''
Collect names of all subdirs and all information on files.
'''
subdirs = []
entries = scandir.scandir(dir_to_scan)
for entry in entries:
if entry.is_dir(follow_symlinks = False):
if '.git' in entry.path or '.svn' in entry.path:
continue
else:
subdirs.append(entry.path)
elif entry.is_file() and (this_file_only is None or this_file_only == entry.path):
f_info = get_file_information(entry, inpath, include_checksum)
files_info.append(f_info)
if not do_more_files(files_info, file_limit):
break
return subdirs, files_info
def get_file_information(f, inpath, include_checksum):
'''
Grabs path and size from scandir file object.
Will compute file's checksum if asked.
'''
f_path = os.path.relpath(f.path, inpath).strip()
f_size = str(f.stat().st_size)
f_info = [f_path, f_size]
if include_checksum:
with open(f.path, 'rU') as infile:
f_checksum = str(mmh3.hash128(infile.read(), 2017))
f_info.append(f_checksum)
return f_info
def write_log(name, files_info, outpath):
'''
Write file information to outpath under a nice header.
'''
out_name = misc.make_heading(name)
if files_info is not None:
out_files_info = ['|'.join(l) for l in files_info]
out_files_info = '\n'.join(out_files_info)
else:
out_files_info = ''
with open(outpath, 'ab') as f:
f.write(out_name)
f.write(out_files_info)
f.write('\n\n')
return None<|fim▁end|> | Walk through inpath and grab paths to all subdirs and info on all files. |
<|file_name|>plot-vtk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
def load_velocity(filename):
import os
if not os.path.exists(filename):
return None
from numpy import zeros
from vtk import vtkPolyDataReader, vtkCellDataToPointData
reader = vtkPolyDataReader()
reader.SetFileName(filename)
reader.ReadAllVectorsOn()
reader.Update()
data = reader.GetOutput()
# Extracting triangulation information
triangles = data.GetPolys().GetData()
points = data.GetPoints()
# Mapping data: cell -> point
mapper = vtkCellDataToPointData()
mapper.AddInputData(data)
mapper.Update()
mapped_data = mapper.GetOutput()
# Extracting interpolate point data
udata = mapped_data.GetPointData().GetArray(0)
ntri = triangles.GetNumberOfTuples()/4
npts = points.GetNumberOfPoints()
nvls = udata.GetNumberOfTuples()
tri = zeros((ntri, 3))
x = zeros(npts)
y = zeros(npts)
ux = zeros(nvls)
uy = zeros(nvls)
for i in xrange(0, ntri):
tri[i, 0] = triangles.GetTuple(4*i + 1)[0]
tri[i, 1] = triangles.GetTuple(4*i + 2)[0]
tri[i, 2] = triangles.GetTuple(4*i + 3)[0]<|fim▁hole|> y[i] = pt[1]
for i in xrange(0, nvls):
U = udata.GetTuple(i)
ux[i] = U[0]
uy[i] = U[1]
return (x, y, tri, ux, uy)
def plot(filename):
import os
from matplotlib.pyplot import clf, tricontour, tricontourf, \
gca, savefig, rc, minorticks_on
if not os.path.exists(filename):
return -1
rc('text', usetex=True)
clf()
x, y, tri, ux, uy = load_velocity(filename)
tricontourf(x, y, tri, ux, 16)
tricontour(x, y, tri, ux, 16, linestyles='-',
colors='black', linewidths=0.5)
minorticks_on()
gca().set_aspect('equal')
gca().tick_params(direction='out', which='both')
gca().set_xticklabels([])
gca().set_yticklabels([])
name, _ = os.path.splitext(filename)
name = os.path.basename(name)
savefig('{0}.png'.format(name), dpi=300, bbox_inches='tight')
savefig('{0}.pdf'.format(name), bbox_inches='tight')
if __name__ == '__main__':
import sys
if len(sys.argv) < 2:
print 'usage: {0} [FILENAME]'.format(sys.argv[0])
sys.exit(-1)
sys.exit(plot(sys.argv[1]))<|fim▁end|> |
for i in xrange(npts):
pt = points.GetPoint(i)
x[i] = pt[0] |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# HobsonPy documentation build configuration file, created by
# sphinx-quickstart on Sat Sep 16 18:59:19 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'HobsonPy'
copyright = '2017, Al Sweigart'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".<|fim▁hole|># directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'HobsonPydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'HobsonPy.tex', 'HobsonPy Documentation',
'Al Sweigart', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'hobsonpy', 'HobsonPy Documentation',
['Al Sweigart'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'HobsonPy', 'HobsonPy Documentation',
'Al Sweigart', 'HobsonPy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied |
<|file_name|>simple_browse.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys, os, urllib, argparse, base64, time, threading, re
from gi.repository import Gtk, WebKit, Notify
webView = None
def refresh(widget, event):
global webView
webView.reload()
window_title = ''
def HandleTitleChanged(webview, title):
global window_title
window_title = title
parent = webview
while parent.get_parent() != None:
parent = webview.get_parent()
parent.set_title(title)
return True
def HandleCreateWebView(webview, frame):
info = Gtk.Window()
info.set_default_size(1000, 700)
child = WebKit.WebView()
child.connect('create-web-view', HandleCreateWebView)
child.connect('close-web-view', HandleCloseWebView)
child.connect('navigation-policy-decision-requested', HandleNavigationRequested)
#child.connect('notify::title', HandleTitleChanged)
info.set_title('')
info.add(child)
info.show_all()
return child
def HandleCloseWebView(webview):
parent = webview
while parent.get_parent() != None:
parent = webview.get_parent()
parent.destroy()
def HandleNewWindowPolicyDecisionRequested(webview, frame, request, navigation_action, policy_decision):
if '&URL=' in request.get_uri():
os.system('xdg-open "%s"' % urllib.unquote(request.get_uri().split('&URL=')[1]).decode('utf8'))
def HandleNavigationRequested(webview, frame, request, navigation_action, policy_decision):
if '&URL=' in request.get_uri():
HandleCloseWebView(webview)
return 1
prefills = {}
submit = False
ignore_submit = []
def prefill_password(webview, frame):
global prefills, submit
should_ignore_submit = False
dom = webview.get_dom_document()
forms = dom.get_forms()
for i in range(0, forms.get_length()):
form = forms.item(i)
elements = form.get_elements()
is_form_modified = False
for j in range(0, elements.get_length()):
element = elements.item(j)
element_name = element.get_name()
if element_name in ignore_submit:
should_ignore_submit = True
for key in prefills.keys():
if element_name == key:
if prefills[key].lower() == 'true':
element.set_checked(True)
is_form_modified = True
else:
element.set_value(prefills[key])
is_form_modified = True
if is_form_modified and submit and not should_ignore_submit:
form.submit()
def HandleMimeType(webview, frame, request, mimetype, policy_decision):
print 'Requested decision for mimetype:', mimetype
return True
stop_threads = False
search_notifys = []
def SearchNotify(webview):
global stop_threads
global window_title
global search_notifys
while True:
if stop_threads:
break
dom = webview.get_dom_document()
if not dom:
continue
body = dom.get_body()
if not body:
continue<|fim▁hole|> continue
for notice in search_notifys:
msgs = list(set(re.findall(notice, body_html)))
if len(msgs) > 0:
for msg in msgs:
Notify.init(window_title)
msg_notify = Notify.Notification.new(window_title, msg, "dialog-information")
msg_notify.show()
time.sleep(2) # Don't duplicate the notification
time.sleep(2)
if __name__ == "__main__":
parser_epilog = ("Example:\n\n"
"./simple_browse.py https://owa.example.com --useragent=\"Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0\" --stylesheet=~/simple_browse/sample_styles/owa_style.css --username=<webmail username> --b64pass=\"<base64 encoded password>\" --forminput=trusted:true --submit --notify=PHNwYW4gY2xhc3M9Im53SXRtVHh0U2JqIj4oW1x3IF0rKTwvc3Bhbj4=\n\n"
"This command will open Outlook Web Access, set the user agent to allow it to \nload using pipelight (for silverlight support), login to webmail, then apply a \ncustom css style to make webmail look like a desktop app. When new emails\narrive, notification will be sent to gnome-shell.\n")
parser = argparse.ArgumentParser(description="Simple Browser: A simple webkit browser written in Python", epilog=parser_epilog, formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("url")
parser.add_argument("--useragent", help="An optional user agent to apply to the main page")
parser.add_argument("--stylesheet", help="An optional stylesheet to apply to the main page")
parser.add_argument("--username", help="A username we'll try to use to sign in")
parser.add_argument("--password", help="A password for signing in")
parser.add_argument("--b64pass", help="An alternative b64 encoded password for sign on")
parser.add_argument("--forminput", help="A form field name and value to prefill (seperated by a colon). Only one value for each key is allowed.", action='append')
parser.add_argument("--submit", help="Submit the filled form when we've finished entering values", action="store_true")
parser.add_argument("--ignore-submit", help="Ignore the submit if the form contains this key", action='append')
parser.add_argument("--title", help="Title for the window")
parser.add_argument("--notify", help="A regex search string, base64 encoded, which will display a notification when found, example: <span class=\"nwItmTxtSbj\">([\w ]+)</span>", action='append')
args = parser.parse_args()
url = args.url
user_agent = None
if args.useragent:
user_agent = args.useragent
stylesheet = None
if args.stylesheet:
stylesheet = 'file://localhost%s' % os.path.abspath(args.stylesheet)
if args.username:
prefills['username'] = args.username
if args.b64pass:
prefills['password'] = base64.b64decode(args.b64pass)
elif args.password:
prefills['password'] = args.password
if args.submit:
submit = True
if args.forminput:
for field in args.forminput:
key, value = field.split(':')
if key in prefills:
parser.print_help()
exit(1)
prefills[key] = value
if args.ignore_submit:
ignore_submit.extend(args.ignore_submit)
if args.notify:
for notice in args.notify:
search_notifys.append(base64.b64decode(notice))
win = Gtk.Window()
scrolled = Gtk.ScrolledWindow()
win.set_default_size(1500, 900)
webView = WebKit.WebView()
webView.load_uri(url)
overlay = Gtk.Overlay()
overlay.add(webView)
# Apply Settings
settings = WebKit.WebSettings()
if user_agent:
settings.set_property('user-agent', user_agent)
settings.set_property('enable-spell-checking', True)
if stylesheet:
settings.set_property('user-stylesheet-uri', stylesheet)
webView.set_settings(settings)
# Add Signal handlers to the webview
webView.connect('create-web-view', HandleCreateWebView)
webView.connect('close-web-view', HandleCloseWebView)
webView.connect('new-window-policy-decision-requested', HandleNewWindowPolicyDecisionRequested)
webView.connect('navigation-policy-decision-requested', HandleNavigationRequested)
#webView.connect('notify::title', HandleTitleChanged)
webView.connect('mime-type-policy-decision-requested', HandleMimeType)
webView.connect('load-finished', prefill_password)
win.set_title('')
# Add the Refresh button
fixed = Gtk.Fixed()
fixed.set_halign(Gtk.Align.START)
fixed.set_valign(Gtk.Align.START)
overlay.add_overlay(fixed)
fixed.show()
image = Gtk.Image()
image.set_from_pixbuf(Gtk.IconTheme().load_icon('gtk-refresh', 10, 0))
imgevent = Gtk.EventBox()
imgevent.add(image)
imgevent.connect('button-press-event', refresh)
fixed.put(imgevent, 10, 10)
win.add(scrolled)
scrolled.add(overlay)
win.show_all()
win.connect('destroy', Gtk.main_quit)
if args.title:
window_title = args.title
win.set_title(args.title)
if search_notifys:
t = threading.Thread(target=SearchNotify, args=(webView,))
t.start()
Gtk.main()
stop_threads = True<|fim▁end|> | body_html = body.get_inner_html()
if not body_html: |
<|file_name|>waitable_event_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
<|fim▁hole|>
namespace base {
TEST(WaitableEventTest, ManualBasics) {
WaitableEvent event(true, false);
EXPECT_FALSE(event.IsSignaled());
event.Signal();
EXPECT_TRUE(event.IsSignaled());
EXPECT_TRUE(event.IsSignaled());
event.Reset();
EXPECT_FALSE(event.IsSignaled());
EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
event.Signal();
event.Wait();
EXPECT_TRUE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
}
TEST(WaitableEventTest, AutoBasics) {
WaitableEvent event(false, false);
EXPECT_FALSE(event.IsSignaled());
event.Signal();
EXPECT_TRUE(event.IsSignaled());
EXPECT_FALSE(event.IsSignaled());
event.Reset();
EXPECT_FALSE(event.IsSignaled());
EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
event.Signal();
event.Wait();
EXPECT_FALSE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
event.Signal();
EXPECT_TRUE(event.TimedWait(TimeDelta::FromMilliseconds(10)));
}
TEST(WaitableEventTest, WaitManyShortcut) {
WaitableEvent* ev[5];
for (unsigned i = 0; i < 5; ++i)
ev[i] = new WaitableEvent(false, false);
ev[3]->Signal();
EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 3u);
ev[3]->Signal();
EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 3u);
ev[4]->Signal();
EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 4u);
ev[0]->Signal();
EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 0u);
for (unsigned i = 0; i < 5; ++i)
delete ev[i];
}
class WaitableEventSignaler : public PlatformThread::Delegate {
public:
WaitableEventSignaler(double seconds, WaitableEvent* ev)
: seconds_(seconds),
ev_(ev) {
}
void ThreadMain() {
PlatformThread::Sleep(TimeDelta::FromSeconds(static_cast<int>(seconds_)));
ev_->Signal();
}
private:
const double seconds_;
WaitableEvent *const ev_;
};
TEST(WaitableEventTest, WaitMany) {
WaitableEvent* ev[5];
for (unsigned i = 0; i < 5; ++i)
ev[i] = new WaitableEvent(false, false);
WaitableEventSignaler signaler(0.1, ev[2]);
PlatformThreadHandle thread;
PlatformThread::Create(0, &signaler, &thread);
EXPECT_EQ(WaitableEvent::WaitMany(ev, 5), 2u);
PlatformThread::Join(thread);
for (unsigned i = 0; i < 5; ++i)
delete ev[i];
}
} // namespace base<|fim▁end|> | #include "base/time.h"
#include "base/synchronization/waitable_event.h"
#include "base/threading/platform_thread.h"
#include "testing/gtest/include/gtest/gtest.h" |
<|file_name|>app.py<|end_file_name|><|fim▁begin|>""" Configuration for Flask app """
import os
import urllib
from flask import (Flask, abort, flash, Response)
from playhouse.flask_utils import FlaskDB
ADMIN_PASSWORD = 'secret'
APP_DIR = os.path.dirname(os.path.realpath(__file__))
DATABASE = 'sqliteext:///%s' % os.path.join(APP_DIR, 'blog.db')
DEBUG = False
SECRET_KEY = 'shhh, secret!' # Used by Flask to encrypt session cookie.
SITE_WIDTH = 800
app = Flask(__name__)
app.config.from_object(__name__)
flask_db = FlaskDB(app)
database = flask_db.database
from models import Entry, FTSEntry
database.create_tables([Entry, FTSEntry], safe=True)
# Setup routes
import views
app.add_url_rule('/login/', 'login', views.login, methods=['GET', 'POST'])
app.add_url_rule('/logout/', 'logout', views.logout, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', views.index, methods=['GET'])<|fim▁hole|>
@app.template_filter('clean_querystring')
def clean_querystring(request_args, *keys_to_remove, **new_values):
querystring = dict((key, value) for key, value in request_args.items())
for key in keys_to_remove:
querystring.pop(key, None)
querystring.update(new_values)
return urllib.urlencode(querystring)
@app.errorhandler(404)
def not_found(exc):
return Response('<h3>404 Error: Page Not found</h3>'), 404<|fim▁end|> | app.add_url_rule('/create', 'create', views.create, methods=['GET', 'POST'])
app.add_url_rule('/drafts', 'drafts', views.drafts, methods=['GET'])
app.add_url_rule('/<slug>', 'detail', views.detail, methods=['GET'])
app.add_url_rule('/<slug>/edit', 'edit', views.edit, methods=['GET', 'POST']) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Anonymous token-based surveys<|fim▁hole|><|fim▁end|> | """ |
<|file_name|>horton-esp-test.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2015 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
#--
import sys, argparse, os, numpy as np
from horton import log, __version__
from horton.scripts.common import parse_h5, store_args, check_output, \
write_script_output
from horton.scripts.espfit import load_charges, load_cost
# All, except underflows, is *not* fine.
np.seterr(divide='raise', over='raise', invalid='raise')
def parse_args():
parser = argparse.ArgumentParser(prog='horton-esp-test.py',
description='Test how well charges reproduce the ESP.')
parser.add_argument('-V', '--version', action='version',
version="%%(prog)s (HORTON version %s)" % __version__)
parser.add_argument('cost',
help='The location of the cost function in the form '
'"file.h5:group/cost". This argument must be the same as the '
'output argument of the script horton-esp-cost.py.')
parser.add_argument('charges', type=str,
help='The atomic charges to be used in the form '
'"file.h5:group/charges". ')
parser.add_argument('output', type=str,
help='The output destination in the form file.h5:group. The colon and '
'the group name are optional. When omitted, the root group of the '
'HDF5 file is used.')
parser.add_argument('--overwrite', default=False, action='store_true',
help='Overwrite existing output in the HDF5 file')
parser.add_argument('--qtot', '-q', default=None, type=float,
help='The total charge of the system. When given, the charges from the '
'HDF5 file are corrected.')
return parser.parse_args()
def main():
args = parse_args()
fn_h5, grp_name = parse_h5(args.output, 'output')
# check if the group is already present (and not empty) in the output file
if check_output(fn_h5, grp_name, args.overwrite):
return
# Load the cost function from the HDF5 file
cost, used_volume = load_cost(args.cost)
# Load the charges from the HDF5 file
charges = load_charges(args.charges)
# Fix total charge if requested
if args.qtot is not None:
charges -= (charges.sum() - args.qtot)/len(charges)
# Store parameters in output
results = {}
results['qtot'] = charges.sum()
# Fitness of the charges
results['cost'] = cost.value_charges(charges)
if results['cost'] < 0:
results['rmsd'] = 0.0
else:
results['rmsd'] = (results['cost']/used_volume)**0.5
# Worst case stuff
results['cost_worst'] = cost.worst(0.0)
if results['cost_worst'] < 0:
results['rmsd_worst'] = 0.0
else:
results['rmsd_worst'] = (results['cost_worst']/used_volume)**0.5
# Write some things on screen
if log.do_medium:
log('RMSD charges: %10.5e' % np.sqrt((charges**2).mean()))
log('RMSD ESP: %10.5e' % results['rmsd'])
log('Worst RMSD ESP: %10.5e' % results['rmsd_worst'])
log.hline()
# Store the results in an HDF5 file
write_script_output(fn_h5, grp_name, results, args)
if __name__ == '__main__':
main()<|fim▁end|> | #!/usr/bin/env python |
<|file_name|>BinaryTreeLevelOrderTraversalSolutionTest.java<|end_file_name|><|fim▁begin|>package BinaryTreeLevelOrderTraversal;
import org.junit.Test;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
<|fim▁hole|>
private BinaryTreeLevelOrderTraversalSolution solution = new BinaryTreeLevelOrderTraversalSolution();
@Test
public void nullInput() {
List<List<Integer>> result = solution.levelOrder(null);
assertThat(result).isEmpty();
}
@Test
public void singleNodeInput() {
TreeNode root = new TreeNode(1);
List<List<Integer>> result = solution.levelOrder(root);
assertThat(result).hasSize(1);
assertThat(result.get(0)).containsExactly(1);
}
@Test
public void twoNodesInput() {
TreeNode root = new TreeNode(1);
root.right = new TreeNode(2);
List<List<Integer>> result = solution.levelOrder(root);
assertThat(result).hasSize(2);
assertThat(result.get(0)).containsExactly(1);
assertThat(result.get(1)).containsExactly(2);
}
@Test
public void threeNodesInput() {
TreeNode root = new TreeNode(1);
root.left = new TreeNode(2);
root.right = new TreeNode(3);
List<List<Integer>> result = solution.levelOrder(root);
assertThat(result).hasSize(2);
assertThat(result.get(0)).containsExactly(1);
assertThat(result.get(1)).containsExactly(2, 3);
}
@Test
public void fiveNodesInput() {
TreeNode root = new TreeNode(3);
root.left = new TreeNode(9);
TreeNode rightTree = new TreeNode(20);
rightTree.left = new TreeNode(15);
rightTree.right = new TreeNode(7);
root.right = rightTree;
List<List<Integer>> result = solution.levelOrder(root);
assertThat(result).hasSize(3);
assertThat(result.get(0)).containsExactly(3);
assertThat(result.get(1)).containsExactly(9, 20);
assertThat(result.get(2)).containsExactly(15, 7);
}
}<|fim▁end|> | public class BinaryTreeLevelOrderTraversalSolutionTest { |
<|file_name|>schema.ts<|end_file_name|><|fim▁begin|>export type Schema = {
type: 'boolean' | 'number' | 'string' | 'array' | 'object' | 'any';
nullable: boolean;
optional: boolean;
items?: Schema;
properties?: Obj;
description?: string;
example?: any;
format?: string;
ref?: string;
enum?: string[];
};
type NonUndefinedPropertyNames<T extends Obj> = {
[K in keyof T]: T[K]['optional'] extends true ? never : K
}[keyof T];
type UndefinedPropertyNames<T extends Obj> = {
[K in keyof T]: T[K]['optional'] extends true ? K : never
}[keyof T];
type OnlyRequired<T extends Obj> = Pick<T, NonUndefinedPropertyNames<T>>;
type OnlyOptional<T extends Obj> = Pick<T, UndefinedPropertyNames<T>>;
export type Obj = { [key: string]: Schema };
export type ObjType<s extends Obj> =
{ [P in keyof OnlyOptional<s>]?: SchemaType<s[P]> } &
{ [P in keyof OnlyRequired<s>]: SchemaType<s[P]> };
// https://qiita.com/hrsh7th@github/items/84e8968c3601009cdcf2
type MyType<T extends Schema> = {
0: any;
1: SchemaType<T>;
}[T extends Schema ? 1 : 0];
type NullOrUndefined<p extends Schema, T> =
p['nullable'] extends true
? p['optional'] extends true
? (T | null | undefined)
: (T | null)
: p['optional'] extends true
? (T | undefined)
: T;
export type SchemaType<p extends Schema> =
p['type'] extends 'number' ? NullOrUndefined<p, number> :
p['type'] extends 'string' ? NullOrUndefined<p, string> :<|fim▁hole|> p['type'] extends 'array' ? NullOrUndefined<p, MyType<NonNullable<p['items']>>[]> :
p['type'] extends 'object' ? NullOrUndefined<p, ObjType<NonNullable<p['properties']>>> :
p['type'] extends 'any' ? NullOrUndefined<p, any> :
any;<|fim▁end|> | p['type'] extends 'boolean' ? NullOrUndefined<p, boolean> : |
<|file_name|>Label.js<|end_file_name|><|fim▁begin|>/*
* Ext JS Library 2.3.0
* Copyright(c) 2006-2009, Ext JS, LLC.
<|fim▁hole|> * [email protected]
*
* http://extjs.com/license
*/
/**
* @class Ext.form.Label
* @extends Ext.BoxComponent
* Basic Label field.
* @constructor
* Creates a new Label
* @param {Ext.Element/String/Object} config The configuration options. If an element is passed, it is set as the internal
* element and its id used as the component id. If a string is passed, it is assumed to be the id of an existing element
* and is used as the component id. Otherwise, it is assumed to be a standard config object and is applied to the component.
*/
Ext.form.Label = Ext.extend(Ext.BoxComponent, {
/**
* @cfg {String} text The plain text to display within the label (defaults to ''). If you need to include HTML
* tags within the label's innerHTML, use the {@link #html} config instead.
*/
/**
* @cfg {String} forId The id of the input element to which this label will be bound via the standard HTML 'for'
* attribute. If not specified, the attribute will not be added to the label.
*/
/**
* @cfg {String} html An HTML fragment that will be used as the label's innerHTML (defaults to '').
* Note that if {@link #text} is specified it will take precedence and this value will be ignored.
*/
// private
onRender : function(ct, position){
if(!this.el){
this.el = document.createElement('label');
this.el.id = this.getId();
this.el.innerHTML = this.text ? Ext.util.Format.htmlEncode(this.text) : (this.html || '');
if(this.forId){
this.el.setAttribute('for', this.forId);
}
}
Ext.form.Label.superclass.onRender.call(this, ct, position);
},
/**
* Updates the label's innerHTML with the specified string.
* @param {String} text The new label text
* @param {Boolean} encode (optional) False to skip HTML-encoding the text when rendering it
* to the label (defaults to true which encodes the value). This might be useful if you want to include
* tags in the label's innerHTML rather than rendering them as string literals per the default logic.
* @return {Label} this
*/
setText: function(t, encode){
var e = encode === false;
this[!e ? 'text' : 'html'] = t;
delete this[e ? 'text' : 'html'];
if(this.rendered){
this.el.dom.innerHTML = encode !== false ? Ext.util.Format.htmlEncode(t) : t;
}
return this;
}
});
Ext.reg('label', Ext.form.Label);<|fim▁end|> | |
<|file_name|>decorators_chromium.py<|end_file_name|><|fim▁begin|># Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decorators for Chromium port of Rietveld."""
import mimetypes
import sha
from google.appengine.api import memcache
from django.http import HttpResponseForbidden
from . import decorators as deco
from . import models_chromium
from . import responses
def binary_required(func):
"""Decorator that processes the content argument.
Attributes set on the request:
content: a Content entity.
"""
@deco.patch_required
def binary_wrapper(request, content_type, *args, **kwds):
if content_type == "0":
content_key = request.patch.content_key
elif content_type == "1":
content_key = request.patch.patched_content_key
if not content_key or not content_key.get().data:
# The file was not modified. It was likely moved without modification.
# Return the original file.
content_key = request.patch.content_key
else:
# Other values are erroneous so request.content won't be set.
return responses.HttpTextResponse(
'Invalid content type: %s, expected 0 or 1' % content_type,
status=404)
request.mime_type = mimetypes.guess_type(request.patch.filename)[0]
request.content = content_key.get()
return func(request, *args, **kwds)
return binary_wrapper
<|fim▁hole|> """Decorator that insists that you are using a specific key."""
@deco.require_methods('POST')
def key_wrapper(request, *args, **kwds):
key = request.POST.get('password')
if request.user or not key:
return HttpResponseForbidden('You must be admin in for this function')
value = memcache.get('key_required')
if not value:
obj = models_chromium.Key.query().get()
if not obj:
# Create a dummy value so it can be edited from the datastore admin.
obj = models_chromium.Key(hash='invalid hash')
obj.put()
value = obj.hash
memcache.add('key_required', value, 60)
if sha.new(key).hexdigest() != value:
return HttpResponseForbidden('You must be admin in for this function')
return func(request, *args, **kwds)
return key_wrapper<|fim▁end|> |
def key_required(func): |
<|file_name|>pyquora.py<|end_file_name|><|fim▁begin|>import urllib2
from lxml import etree
####################################################################
# API
####################################################################
class Scrape_Quora:
regexpNS = "http://exslt.org/regular-expressions"
@staticmethod
def get_name(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
name = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/div/h1/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return name
@staticmethod
def get_url(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
URL = response.geturl()
return URL
@staticmethod
def get_profile_picture_link(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
profile_picture_link = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/div/img/@data-src', namespaces={'re':Scrape_Quora.regexpNS})[0]
return profile_picture_link
@staticmethod
def get_no_of_questions(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
no_of_questions = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/li/a[text()="Questions"]/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return no_of_questions
@staticmethod
def get_no_of_answers(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
no_of_answers = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/li/a[text()="Answers"]/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return no_of_answers
@staticmethod<|fim▁hole|> tree = etree.parse(response, htmlparser)
no_of_followers = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/li/a[text()="Followers "]/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return no_of_followers
@staticmethod
def get_no_of_following(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
no_of_following = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/li/a[text()="Following "]/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return no_of_following
@staticmethod
def get_no_of_edits(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser()
tree = etree.parse(response, htmlparser)
no_of_edits = tree.xpath('//*[re:test(@id, "ld_[a-z]+_\\d+", g)]/li/a[text()="Edits"]/span/text()', namespaces={'re':Scrape_Quora.regexpNS})[0]
return no_of_edits<|fim▁end|> | def get_no_of_followers(user_name):
url = 'https://www.quora.com/profile/' + user_name
response = urllib2.urlopen(url)
htmlparser = etree.HTMLParser() |
<|file_name|>id_generator.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
impl IdGenerator {
pub fn new(initial: usize) -> IdGenerator {
IdGenerator {next_id: initial}
}
pub fn generate(&mut self) -> usize {
let current = self.next_id;
self.next_id = current + 1;
current
}
}
#[test]
fn test_new_should_succeed() {
IdGenerator::new(42);
}
#[test]
fn test_first_generated_should_be_equal_to_initial() {
assert_eq!(IdGenerator::new(42).generate(), 42);
}<|fim▁end|> | #[derive(Clone, Debug)]
pub struct IdGenerator {
next_id: usize,
} |
<|file_name|>RelayConfigLoader.java<|end_file_name|><|fim▁begin|>/**
*
* Copyright 2016 David Strawn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> *
*/
package strawn.longleaf.relay.util;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;
/**
*
* @author David Strawn
*
* This class loads configuration from the file system.
*
*/
public class RelayConfigLoader {
private static final String serverConfigLocation = "./resources/serverconfig.properties";
private static final String clientConfigLocation = "./resources/clientconfig.properties";
private final Properties properties;
public RelayConfigLoader() {
properties = new Properties();
}
public void loadServerConfig() throws IOException {
loadConfigFromPath(serverConfigLocation);
}
public void loadClientConfig() throws IOException {
loadConfigFromPath(clientConfigLocation);
}
/**
* Use this method if you want to use a different location for the configuration.
* @param configFileLocation - location of the configuration file
* @throws IOException
*/
public void loadConfigFromPath(String configFileLocation) throws IOException {
FileInputStream fileInputStream = new FileInputStream(configFileLocation);
properties.load(fileInputStream);
fileInputStream.close();
}
public int getPort() {
return Integer.parseInt(properties.getProperty("port"));
}
public String getHost() {
return properties.getProperty("host");
}
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. |
<|file_name|>test_run.py<|end_file_name|><|fim▁begin|>import RPi.GPIO as GPIO
import time
import utils
GPIO.setmode(GPIO.BOARD)
pwr = utils.PSU(13, 15)
pwr.on()
print "Power on"
fan = utils.Fan(32)
fan.setDC(90)
print "Fan set at 90%"
pump = utils.NMOS(11)
pump.on()
print "Pump on"
relay = utils.NMOS(16)
relay.on()
print "Peltier on"
t_amb = utils.Therm('28-000004e08693')
t_c_b = utils.Therm('28-000004e0f7cc')
t_c_m = utils.Therm('28-000004e0840a')
t_c_t = utils.Therm('28-000004e08e26')
t_hs = utils.Therm('28-000004e0804f')
try:
while(1):
t_amb.store_temp()
t_c_b.store_temp()
t_c_m.store_temp()
t_c_t.store_temp()
t_hs.store_temp()
# print "Ambient temperature: " + str(t_amb.store_temp())
# print "Down temperature in cooler: " + str(t_c_b.store_temp())
# print "Middle temperature in cooler: " + str(t_c_m.store_temp())
# print "Up temperture in cooler: " + str(t_c_t.store_temp())
# print "Heatsink temperature: " + str(t_hs.store_temp())
except KeyboardInterrupt:
print "Exiting gracefully"
relay.off()
print "Peltier off"
pump.off()
print "Pump off"
pwr.off()
print "Power off"<|fim▁hole|>
GPIO.cleanup()
print "Goodbye!"<|fim▁end|> | |
<|file_name|>mean.rs<|end_file_name|><|fim▁begin|>//! Functions for calculating mean
use std::f64::NAN;
/// Calculates arithmetic mean (AM) of data set `slice`.
///
/// # Arguments
///
/// * `slice` - collection of values
///
/// # Example
///
/// ```
/// use math::mean;
///
/// let slice = [8., 16.];
/// assert_eq!(mean::arithmetic(&slice), 12.);
/// ```
pub fn arithmetic(slice: &[f64]) -> f64 {
slice.iter().fold(0., |a, b| a + b) / slice.len() as f64
}
/// Calculate geometric mean (GM) of data set `slice`.
///
/// If the result would be imaginary, function returns `NAN`.
///
/// # Arguments
///
/// * `slice` - collection of values
///
/// # Example
///
/// ```
/// use math::mean;
///
/// let slice = [9., 16.];
/// assert_eq!(mean::geometric(&slice), 12.);<|fim▁hole|>pub fn geometric(slice: &[f64]) -> f64 {
let product = slice.iter().fold(1., |a, b| a * b);
match product < 0. {
true => NAN,
false => product.powf(1. / slice.len() as f64),
}
}
/// Calculate harmonic mean (HM) of data set `slice`.
///
/// # Arguments
///
/// * `slice` - collection of values
///
/// # Example
///
/// ```
/// use math::mean;
///
/// let slice = [1., 7.];
/// assert_eq!(mean::harmonic(&slice), 1.75);
/// ```
pub fn harmonic(slice: &[f64]) -> f64 {
slice.len() as f64 / slice.iter().fold(0., |a, b| a + 1. / b)
}
#[cfg(test)]
mod tests {
use std::f64::{ NAN, INFINITY, NEG_INFINITY };
use round;
macro_rules! test_mean {
($func:path [ $($name:ident: $params:expr,)* ]) => {
$(
#[test]
fn $name() {
let (slice, expected): (&[f64], f64) = $params;
let result = $func(slice);
match result.is_nan() {
true => assert_eq!(expected.is_nan(), true),
false => assert_eq!(round::half_up(result, 6), expected),
}
}
)*
}
}
test_mean! { super::arithmetic [
arithmetic_1: (&[-7., -4., 1., 3., 8.], 0.2),
arithmetic_2: (&[-4., 1., 3., 8., 12.], 4.),
arithmetic_3: (&[0., 0., 0., 0., 0.], 0.),
arithmetic_4: (&[0., 4., 7., 9., 17.], 7.4),
arithmetic_5: (&[1., 2., 6., 4., 13.], 5.2),
arithmetic_6: (&[1., 5., 10., 20., 25.], 12.2),
arithmetic_7: (&[2., 3., 5., 7., 11.], 5.6),
arithmetic_8: (&[NEG_INFINITY, 1., 2., 3., 4.], NEG_INFINITY),
arithmetic_9: (&[1., 2., 3., 4., INFINITY], INFINITY),
]}
test_mean! { super::geometric [
geometric_1: (&[-7., -4., 1., 3., 8.], 3.676833),
geometric_2: (&[-4., 1., 3., 8., 12.], NAN),
geometric_3: (&[0., 0., 0., 0., 0.], 0.),
geometric_4: (&[0., 4., 7., 9., 17.], 0.),
geometric_5: (&[1., 2., 6., 4., 13.], 3.622738),
geometric_6: (&[1., 5., 10., 20., 25.], 7.578583),
geometric_7: (&[2., 3., 5., 7., 11.], 4.706764),
geometric_8: (&[NEG_INFINITY, 1., 2., 3., 4.], NAN),
geometric_9: (&[1., 2., 3., 4., INFINITY], INFINITY),
]}
test_mean! { super::harmonic [
harmonic_1: (&[-7., -4., 1., 3., 8.], 4.692737),
harmonic_2: (&[-4., 1., 3., 8., 12.], 3.870968),
harmonic_3: (&[0., 0., 0., 0., 0.], 0.),
harmonic_4: (&[0., 4., 7., 9., 17.], 0.),
harmonic_5: (&[1., 2., 6., 4., 13.], 2.508039),
harmonic_6: (&[1., 5., 10., 20., 25.], 3.597122),
harmonic_7: (&[2., 3., 5., 7., 11.], 3.94602),
harmonic_8: (&[NEG_INFINITY, 1., 2., 3., 4.], 2.4),
harmonic_9: (&[1., 2., 3., 4., INFINITY], 2.4),
]}
}<|fim▁end|> | /// ``` |
<|file_name|>aff4.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""AFF4 interface implementation.
This contains an AFF4 data model implementation.
"""
import __builtin__
import abc
import StringIO
import time
import zlib
import logging
from grr.lib import access_control
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import lexer
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import utils
from grr.lib.rdfvalues import grr_rdf
config_lib.DEFINE_integer(
"AFF4.cache_age", 5,
"The number of seconds AFF4 objects live in the cache.")
config_lib.DEFINE_integer(
"AFF4.notification_rules_cache_age", 60,
"The number of seconds AFF4 notification rules are cached.")
# Factor to convert from seconds to microseconds
MICROSECONDS = 1000000
# Age specifications for opening AFF4 objects.
NEWEST_TIME = "NEWEST_TIME"
ALL_TIMES = "ALL_TIMES"
# Just something to write on an index attribute to make it exist.
EMPTY_DATA = "X"
AFF4_PREFIXES = ["aff4:.*", "metadata:.*"]
class Error(Exception):
pass
class LockError(Error):
pass
class InstanciationError(Error, IOError):
pass
class LockContextManager(object):
def __init__(self, aff4_obj, sync):
self.aff4_obj = aff4_obj
self.sync = sync
def __enter__(self):
return self.aff4_obj
def __exit__(self, unused_type, unused_value, unused_traceback):
self.aff4_obj.Close(sync=self.sync)
class Factory(object):
"""A central factory for AFF4 objects."""
def __init__(self):
# This is a relatively short lived cache of objects.
self.cache = utils.AgeBasedCache(
max_size=10000,
max_age=config_lib.CONFIG["AFF4.cache_age"])
self.intermediate_cache = utils.FastStore(2000)
# Create a token for system level actions:
self.root_token = access_control.ACLToken(username="system",
reason="Maintainance")
self.root_token.supervisor = True
self.notification_rules = []
self.notification_rules_timestamp = 0
@classmethod
def ParseAgeSpecification(cls, age):
"""Parses an aff4 age and returns a datastore age specification."""
try:
return (0, int(age))
except (ValueError, TypeError):
pass
if age == NEWEST_TIME:
return data_store.DB.NEWEST_TIMESTAMP
elif age == ALL_TIMES:
return data_store.DB.ALL_TIMESTAMPS
elif len(age) == 2:
start, end = age
return (int(start), int(end))
raise RuntimeError("Unknown age specification: %s" % age)
def GetAttributes(self, urns, ignore_cache=False, token=None,
age=NEWEST_TIME):
"""Retrieves all the attributes for all the urns."""
urns = [utils.SmartUnicode(u) for u in set(urns)]
try:
if not ignore_cache:
result = []
for subject in urns:
key = self._MakeCacheInvariant(subject, token, age)
result.append((subject, self.cache.Get(key)))
return result
except KeyError:
pass
subjects = []
result = {}
# If there are any cache misses, we need to go to the data store. So we
# might as well just re-fetch all the urns again in a single data store
# round trip.
for subject, values in data_store.DB.MultiResolveRegex(
urns, AFF4_PREFIXES,
timestamp=self.ParseAgeSpecification(age),
token=token, limit=None).items():
# Ensure the values are sorted.
values.sort(key=lambda x: x[-1], reverse=True)
key = self._MakeCacheInvariant(subject, token, age)
self.cache.Put(key, values)
result[utils.SmartUnicode(subject)] = values
subjects.append(subject)
return result.items()
def SetAttributes(self, urn, attributes, to_delete, sync=False, token=None):
"""Sets the attributes in the data store and update the cache."""
# Force a data_store lookup next.
try:
# Expire all entries in the cache for this urn (for all tokens, and
# timestamps)
self.cache.ExpirePrefix(utils.SmartStr(urn) + ":")
except KeyError:
pass
attributes[AFF4Object.SchemaCls.LAST] = [
rdfvalue.RDFDatetime().Now().SerializeToDataStore()]
to_delete.add(AFF4Object.SchemaCls.LAST)
data_store.DB.MultiSet(urn, attributes, token=token,
replace=False, sync=sync, to_delete=to_delete)
# TODO(user): This can run in the thread pool since its not time
# critical.
self._UpdateIndex(urn, attributes, token)
def _UpdateIndex(self, urn, attributes, token):
"""Updates any indexes we need."""
index = {}
for attribute, values in attributes.items():
if attribute.index:
for value, _ in values:
index.setdefault(attribute.index, []).append((attribute, value))
if index:
for index_urn, index_data in index.items():
aff4index = self.Create(index_urn, "AFF4Index", mode="w", token=token)
for attribute, value in index_data:
aff4index.Add(urn, attribute, value)
aff4index.Close()
self._UpdateChildIndex(urn, token)
def _UpdateChildIndex(self, urn, token):
"""Update the child indexes.
This function maintains the index for direct child relations. When we set
an AFF4 path, we always add an attribute like
index:dir/%(childname)s to its parent. This is written
asynchronously to its parent.
In order to query for all direct children of an AFF4 object, we then simple
get the attributes which match the regex index:dir/.+ which are the
direct children.
Args:
urn: The AFF4 object for which we update the index.
token: The token to use.
"""
try:
# Create navigation aids by touching intermediate subject names.
while urn.Path() != "/":
basename = urn.Basename()
dirname = rdfvalue.RDFURN(urn.Dirname())
try:
self.intermediate_cache.Get(urn.Path())
return
except KeyError:
data_store.DB.MultiSet(dirname, {
AFF4Object.SchemaCls.LAST: [
rdfvalue.RDFDatetime().Now().SerializeToDataStore()],
# This updates the directory index.
"index:dir/%s" % utils.SmartStr(basename): [EMPTY_DATA],
},
token=token, replace=True, sync=False)
self.intermediate_cache.Put(urn.Path(), 1)
urn = dirname
except access_control.UnauthorizedAccess:
pass
def _DeleteChildFromIndex(self, urn, token):
try:
# Create navigation aids by touching intermediate subject names.
basename = urn.Basename()
dirname = rdfvalue.RDFURN(urn.Dirname())
try:
self.intermediate_cache.ExpireObject(urn.Path())
except KeyError:
pass
data_store.DB.DeleteAttributes(
dirname, ["index:dir/%s" % utils.SmartStr(basename)], token=token,
sync=False)
data_store.DB.MultiSet(dirname, {
AFF4Object.SchemaCls.LAST: [
rdfvalue.RDFDatetime().Now().SerializeToDataStore()],
}, token=token, replace=True, sync=False)
except access_control.UnauthorizedAccess:
pass
def _ExpandURNComponents(self, urn, unique_urns):
"""This expands URNs.
This method breaks the urn into all the urns from its path components and
adds them to the set unique_urns.
Args:
urn: An RDFURN.
unique_urns: A set to add the components of the urn to.
"""
x = ROOT_URN
for component in rdfvalue.RDFURN(urn).Path().split("/"):
if component:
x = x.Add(component)
unique_urns.add(x)
def _MakeCacheInvariant(self, urn, token, age):
"""Returns an invariant key for an AFF4 object.
The object will be cached based on this key. This function is specifically
extracted to ensure that we encapsulate all security critical aspects of the
AFF4 object so that objects do not leak across security boundaries.
Args:
urn: The urn of the object.
token: The access token used to receive the object.
age: The age policy used to build this object. Should be one
of ALL_TIMES, NEWEST_TIME or a range.
Returns:
A key into the cache.
"""
return "%s:%s:%s" % (utils.SmartStr(urn), utils.SmartStr(token),
self.ParseAgeSpecification(age))
def _OpenWithLock(self, transaction, aff4_type=None, age=NEWEST_TIME,
lease_time=100):
values = list(transaction.ResolveRegex(
AFF4_PREFIXES, timestamp=self.ParseAgeSpecification(age)))
local_cache = {rdfvalue.RDFURN(transaction.subject): values}
# TODO(user): We should have a test here that open does not access the
# data store.
aff4_obj = self.Open(transaction.subject, aff4_type=aff4_type,
local_cache=local_cache, age=age,
mode="rw", follow_symlinks=False,
token=transaction.token)
leased_until = aff4_obj.Get(aff4_obj.Schema.LEASED_UNTIL)
if leased_until and leased_until > rdfvalue.RDFDatetime().Now():
raise LockError(transaction.subject)
# Force a data_store lookup next.
try:
# Expire all entries in the cache for this urn (for all tokens, and
# timestamps)
self.cache.ExpirePrefix(utils.SmartStr(transaction.subject) + ":")
except KeyError:
pass
new_lease_time = rdfvalue.RDFDatetime().FromSecondsFromEpoch(
time.time() + lease_time)
transaction.Set(aff4_obj.Schema.LEASED_UNTIL.predicate,
new_lease_time.SerializeToDataStore())
aff4_obj.Set(aff4_obj.Schema.LEASED_UNTIL, new_lease_time)
# We don't want the object to be dirty.
aff4_obj._SyncAttributes() # pylint: disable=protected-access
aff4_obj.locked = True
return aff4_obj
def OpenWithLock(self, urn, aff4_type=None, token=None,
age=NEWEST_TIME, blocking=True, blocking_lock_timeout=10,
blocking_sleep_interval=1, lease_time=100):
"""Open given urn and locks it.
Opens an object and locks it for 'lease_time' seconds. OpenWithLock can
only be used in 'with ...' statement. The lock is released when code
execution leaves 'with ...' block.
The urn is always opened in "rw" mode. Symlinks are not followed in
OpenWithLock() due to possible race conditions.
Args:
urn: The urn to open.
aff4_type: If this optional parameter is set, we raise an
InstanciationError if the object exists and is not an instance of this
type. This check is important when a different object can be stored in
this location.
token: The Security Token to use for opening this item.
age: The age policy used to build this object. Should be one of
NEWEST_TIME, ALL_TIMES or a time range given as a tuple (start, end) in
microseconds since Jan 1st, 1970.
blocking: When True, wait and repeatedly try to grab the lock.
blocking_lock_timeout: Maximum wait time when sync is True.
blocking_sleep_interval: Sleep time between lock grabbing attempts. Used
when blocking is True.
lease_time: Maximum time the object stays locked. Lock will be considered
released when this time expires.
Returns:
Context manager to be used in 'with ...' statement.
"""
timestamp = time.time()
if urn is not None:
urn = rdfvalue.RDFURN(urn)
while True:
try:
obj = data_store.DB.RetryWrapper(
urn, self._OpenWithLock, aff4_type=aff4_type, token=token,
age=age, lease_time=lease_time)
# When we open with lock, we should always use sync.
return LockContextManager(obj, sync=True)
except (data_store.TransactionError, LockError) as e:
if not blocking or time.time() - timestamp > blocking_lock_timeout:
raise LockError(e)
else:
time.sleep(blocking_sleep_interval)
def Open(self, urn, aff4_type=None, mode="r", ignore_cache=False,
token=None, local_cache=None, age=NEWEST_TIME, follow_symlinks=True):
"""Opens the named object.
This instantiates the object from the AFF4 data store.
Note that the root aff4:/ object is a container for all other
objects. Opening it for reading will instantiate a AFF4Volume instance, even
if the row does not exist.
The mode parameter specifies, how the object should be opened. A read only
mode will raise when calling Set() on it, while a write only object will
never read from the data store. Note that its impossible to open an object
with pure write support (since we have no idea what type it should be
without reading the data base) - use Create() instead for purely write mode.
Args:
urn: The urn to open.
aff4_type: If this parameter is set, we raise an IOError if
the object is not an instance of this type. This check is important
when a different object can be stored in this location. If mode is
"w", this parameter will determine the type of the object and is
mandatory.
mode: The mode to open the file with.
ignore_cache: Forces a data store read.
token: The Security Token to use for opening this item.
local_cache: A dict containing a cache as returned by GetAttributes. If
set, this bypasses the factory cache.
age: The age policy used to build this object. Should be one of
NEWEST_TIME, ALL_TIMES or a time range given as a tuple (start, end) in
microseconds since Jan 1st, 1970.
follow_symlinks: If object opened is a symlink, follow it.
Returns:
An AFF4Object instance.
Raises:
IOError: If the object is not of the required type.
AttributeError: If the requested mode is incorrect.
"""
if mode not in ["w", "r", "rw"]:
raise AttributeError("Invalid mode %s" % mode)
if mode == "w":
if aff4_type is None:
raise AttributeError("Need a type to open in write only mode.")
return self.Create(urn, aff4_type, mode=mode, token=token, age=age,
ignore_cache=ignore_cache, force_new_version=False)
<|fim▁hole|>
if "r" in mode and (local_cache is None or urn not in local_cache):
# Warm up the cache. The idea is to prefetch all the path components in
# the same round trip and make sure this data is in cache, so that as each
# AFF4 object is instantiated it can read attributes from cache rather
# than round tripping to the data store.
unique_urn = set()
self._ExpandURNComponents(urn, unique_urn)
local_cache = dict(
self.GetAttributes(unique_urn,
age=age, ignore_cache=ignore_cache,
token=token))
# Read the row from the table.
result = AFF4Object(urn, mode=mode, token=token, local_cache=local_cache,
age=age, follow_symlinks=follow_symlinks)
# Get the correct type.
existing_type = result.Get(result.Schema.TYPE, default="AFF4Volume")
if existing_type:
result = result.Upgrade(existing_type)
if (aff4_type is not None and
not isinstance(result, AFF4Object.classes[aff4_type])):
raise InstanciationError(
"Object %s is of type %s, but required_type is %s" % (
urn, result.__class__.__name__, aff4_type))
return result
def MultiOpen(self, urns, mode="rw", token=None, aff4_type=None,
age=NEWEST_TIME):
"""Opens a bunch of urns efficiently."""
if mode not in ["w", "r", "rw"]:
raise RuntimeError("Invalid mode %s" % mode)
# We accept both lists and generators of urns
urns = list(urns)
# Fill up the cache with all the urns
unique_urn = set()
for urn in urns:
self._ExpandURNComponents(urn, unique_urn)
cache = dict(self.GetAttributes(unique_urn, token=token, age=age))
symlinks = []
for urn in urns:
try:
if urn in cache:
obj = self.Open(urn, mode=mode, token=token, local_cache=cache,
aff4_type=aff4_type, age=age,
follow_symlinks=False)
target = obj.Get(obj.Schema.SYMLINK_TARGET)
if target is not None:
symlinks.append(target)
else:
yield obj
except IOError:
pass
if symlinks:
for obj in self.MultiOpen(symlinks, mode=mode, token=token,
aff4_type=aff4_type, age=age):
yield obj
def OpenDiscreteVersions(self, urn, mode="r", ignore_cache=False, token=None,
local_cache=None, age=ALL_TIMES,
follow_symlinks=True):
"""Returns all the versions of the object as AFF4 objects.
Args:
urn: The urn to open.
mode: The mode to open the file with.
ignore_cache: Forces a data store read.
token: The Security Token to use for opening this item.
local_cache: A dict containing a cache as returned by GetAttributes. If
set, this bypasses the factory cache.
age: The age policy used to build this object. Should be one of
ALL_TIMES or a time range
follow_symlinks: If object opened is a symlink, follow it.
Yields:
An AFF4Object for each version.
Raises:
IOError: On bad open or wrong time range specified.
This iterates through versions of an object, returning the newest version
first, then each older version until the beginning of time.
Note that versions are defined by changes to the TYPE attribute, and this
takes the version between two TYPE attributes.
In many cases as a user you don't want this, as you want to be returned an
object with as many attributes as possible, instead of the subset of them
that were Set between these two times.
"""
if age == NEWEST_TIME or len(age) == 1:
raise IOError("Bad age policy NEWEST_TIME for OpenDiscreteVersions.")
if len(age) == 2:
oldest_age = age[1]
else:
oldest_age = 0
aff4object = FACTORY.Open(urn, mode=mode, ignore_cache=ignore_cache,
token=token, local_cache=local_cache, age=age,
follow_symlinks=follow_symlinks)
# TYPE is always written last so we trust it to bound the version.
# Iterate from newest to oldest.
type_iter = aff4object.GetValuesForAttribute(aff4object.Schema.TYPE)
version_list = [(t.age, str(t)) for t in type_iter]
version_list.append((oldest_age, None))
for i in range(0, len(version_list)-1):
age_range = (version_list[i+1][0], version_list[i][0])
# Create a subset of attributes for use in the new object that represents
# this version.
clone_attrs = {}
for k, values in aff4object.synced_attributes.iteritems():
reduced_v = []
for v in values:
if v.age > age_range[0] and v.age <= age_range[1]:
reduced_v.append(v)
clone_attrs.setdefault(k, []).extend(reduced_v)
obj_cls = AFF4Object.classes[version_list[i][1]]
new_obj = obj_cls(urn, mode=mode, parent=aff4object.parent,
clone=clone_attrs, token=token, age=age_range,
local_cache=local_cache,
follow_symlinks=follow_symlinks)
new_obj.Initialize() # This is required to set local attributes.
yield new_obj
def Stat(self, urns, token=None):
"""Returns metadata about all urns.
Currently the metadata include type, and last update time.
Args:
urns: The urns of the objects to open.
token: The token to use.
Yields:
A dict of metadata.
Raises:
RuntimeError: A string was passed instead of an iterable.
"""
if isinstance(urns, basestring):
raise RuntimeError("Expected an iterable, not string.")
for subject, values in data_store.DB.MultiResolveRegex(
urns, ["aff4:type"], token=token).items():
yield dict(urn=rdfvalue.RDFURN(subject), type=values[0])
def Create(self, urn, aff4_type, mode="w", token=None, age=NEWEST_TIME,
ignore_cache=False, force_new_version=True):
"""Creates the urn if it does not already exist, otherwise opens it.
If the urn exists and is of a different type, this will also promote it to
the specified type.
Args:
urn: The object to create.
aff4_type: The desired type for this object.
mode: The desired mode for this object.
token: The Security Token to use for opening this item.
age: The age policy used to build this object. Only makes sense when mode
has "r".
ignore_cache: Bypass the aff4 cache.
force_new_version: Forces the creation of a new object in the data_store.
Returns:
An AFF4 object of the desired type and mode.
Raises:
AttributeError: If the mode is invalid.
"""
if mode not in ["w", "r", "rw"]:
raise AttributeError("Invalid mode %s" % mode)
if urn is not None:
urn = rdfvalue.RDFURN(urn)
if "r" in mode:
# Check to see if an object already exists.
try:
existing = self.Open(
urn, mode=mode, token=token, age=age,
ignore_cache=ignore_cache)
result = existing.Upgrade(aff4_type)
if force_new_version and existing.Get(result.Schema.TYPE) != aff4_type:
result.ForceNewVersion()
return result
except IOError:
pass
# Object does not exist, just make it.
cls = AFF4Object.classes[str(aff4_type)]
result = cls(urn, mode=mode, token=token, age=age)
result.Initialize()
if force_new_version:
result.ForceNewVersion()
return result
def Delete(self, urn, token=None, limit=1000):
"""Drop all the information about this object.
DANGEROUS! This recursively deletes all objects contained within the
specified URN.
Args:
urn: The object to remove.
token: The Security Token to use for opening this item.
limit: The number of objects to remove.
Raises:
RuntimeError: If the urn is too short. This is a safety check to ensure
the root is not removed.
"""
urn = rdfvalue.RDFURN(urn)
if len(urn.Path()) < 1:
raise RuntimeError("URN %s too short. Please enter a valid URN" % urn)
# Get all the children of this URN and delete them all.
logging.info("Recursively removing AFF4 Object %s", urn)
fd = FACTORY.Create(urn, "AFF4Volume", mode="rw", token=token)
count = 0
for child in fd.ListChildren():
logging.info("Removing child %s", child)
self.Delete(child, token=token)
count += 1
if count >= limit:
logging.info("Object limit reached, there may be further objects "
"to delete.")
data_store.DB.DeleteSubject(fd.urn, token=token)
self._DeleteChildFromIndex(fd.urn, token)
count += 1
logging.info("Removed %s objects", count)
# Ensure this is removed from the cache as well.
self.Flush()
def RDFValue(self, name):
return rdfvalue.RDFValue.classes.get(name)
def AFF4Object(self, name):
return AFF4Object.classes.get(name)
def Merge(self, first, second):
"""Merge two AFF4 objects and return a new object.
Args:
first: The first object (Can be None).
second: The second object (Can be None).
Returns:
A new object with the type of the latest object, but with both first and
second's attributes.
"""
if first is None: return second
if second is None: return first
# Make first the most recent object, and second the least recent:
if first.Get("type").age < second.Get("type").age:
first, second = second, first
# Merge the attributes together.
for k, v in second.synced_attributes.iteritems():
first.synced_attributes.setdefault(k, []).extend(v)
for k, v in second.new_attributes.iteritems():
first.new_attributes.setdefault(k, []).extend(v)
return first
def MultiListChildren(self, urns, token=None, limit=None, age=NEWEST_TIME):
"""Lists bunch of directories efficiently.
Args:
urns: List of urns to list children.
token: Security token.
limit: Max number of children to list (NOTE: this is per urn).
age: The age of the items to retrieve. Should be one of ALL_TIMES,
NEWEST_TIME or a range.
Returns:
A dict keyed by subjects, with values being a list of children urns of a
given subject.
"""
index_prefix = "index:dir/"
result = {}
for subject, values in data_store.DB.MultiResolveRegex(
urns, index_prefix + ".+", token=token,
timestamp=Factory.ParseAgeSpecification(age),
limit=limit).iteritems():
subject_result = []
for predicate, _, timestamp in values:
urn = rdfvalue.RDFURN(subject).Add(predicate[len(index_prefix):])
urn.age = rdfvalue.RDFDatetime(timestamp)
subject_result.append(urn)
result[subject] = subject_result
return result
def Flush(self):
data_store.DB.Flush()
self.cache.Flush()
self.intermediate_cache.Flush()
def UpdateNotificationRules(self):
fd = self.Open(rdfvalue.RDFURN("aff4:/config/aff4_rules"), mode="r",
token=self.root_token)
self.notification_rules = [rule for rule in fd.OpenChildren()
if isinstance(rule, AFF4NotificationRule)]
def NotifyWriteObject(self, aff4_object):
current_time = time.time()
if (current_time - self.notification_rules_timestamp >
config_lib.CONFIG["AFF4.notification_rules_cache_age"]):
self.notification_rules_timestamp = current_time
self.UpdateNotificationRules()
for rule in self.notification_rules:
try:
rule.OnWriteObject(aff4_object)
except Exception, e: # pylint: disable=broad-except
logging.error("Error while applying the rule: %s", e)
class Attribute(object):
"""AFF4 schema attributes are instances of this class."""
description = ""
# A global registry of attributes by name. This ensures we do not accidentally
# define the same attribute with conflicting types.
PREDICATES = {}
# A human readable name to be used in filter queries.
NAMES = {}
def __init__(self, predicate, attribute_type=rdfvalue.RDFString,
description="", name=None, _copy=False, default=None, index=None,
versioned=True, lock_protected=False,
creates_new_object_version=True):
"""Constructor.
Args:
predicate: The name of this attribute - must look like a URL
(e.g. aff4:contains). Will be used to store the attribute.
attribute_type: The RDFValue type of this attributes.
description: A one line description of what this attribute represents.
name: A human readable name for the attribute to be used in filters.
_copy: Used internally to create a copy of this object without
registering.
default: A default value will be returned if the attribute is not set on
an object. This can be a constant or a callback which receives the fd
itself as an arg.
index: The name of the index to use for this attribute. If None, the
attribute will not be indexed.
versioned: Should this attribute be versioned? Non-versioned attributes
always overwrite other versions of the same attribute.
lock_protected: If True, this attribute may only be set if the object was
opened via OpenWithLock().
creates_new_object_version: If this is set, a write to this attribute
will also write a new version of the parent attribute. This should be
False for attributes where lots of entries are collected like logs.
"""
self.name = name
self.predicate = predicate
self.attribute_type = attribute_type
self.description = description
self.default = default
self.index = index
self.versioned = versioned
self.lock_protected = lock_protected
self.creates_new_object_version = creates_new_object_version
# Field names can refer to a specific component of an attribute
self.field_names = []
if not _copy:
# Check the attribute registry for conflicts
try:
old_attribute = Attribute.PREDICATES[predicate]
if old_attribute.attribute_type != attribute_type:
msg = "Attribute %s defined with conflicting types (%s, %s)" % (
predicate, old_attribute.attribute_type.__class__.__name__,
attribute_type.__class__.__name__)
logging.error(msg)
raise RuntimeError(msg)
except KeyError:
pass
# Register
self.PREDICATES[predicate] = self
if name:
self.NAMES[name] = self
def Copy(self):
"""Return a copy without registering in the attribute registry."""
return Attribute(self.predicate, self.attribute_type, self.description,
self.name, _copy=True)
def __call__(self, *args, **kwargs):
"""A shortcut allowing us to instantiate a new type from an attribute."""
result = self.attribute_type(*args, **kwargs)
result.attribute_instance = self
return result
def __str__(self):
return self.predicate
def __repr__(self):
return "<Attribute(%s, %s)>" %(self.name, self.predicate)
def __hash__(self):
return hash(self.predicate)
def __eq__(self, other):
return str(self.predicate) == str(other)
def __ne__(self, other):
return str(self.predicate) != str(other)
def __getitem__(self, item):
result = self.Copy()
result.field_names = item.split(".")
return result
def Fields(self):
return self.attribute_type.Fields()
@classmethod
def GetAttributeByName(cls, name):
# Support attribute names with a . in them:
try:
if "." in name:
name, field = name.split(".", 1)
return cls.NAMES[name][field]
return cls.NAMES[name]
except KeyError:
raise AttributeError("Invalid attribute")
def GetRDFValueType(self):
"""Returns this attribute's RDFValue class."""
result = self.attribute_type
for field_name in self.field_names:
# Support the new semantic protobufs.
if issubclass(result, rdfvalue.RDFProtoStruct):
result = result.type_infos.get(field_name, rdfvalue.RDFString).type
else:
# TODO(user): Remove and deprecate.
# Support for the old RDFProto.
result = result.rdf_map.get(field_name, rdfvalue.RDFString)
return result
def GetValues(self, fd):
"""Return the values for this attribute as stored in an AFF4Object."""
result = False
for result in fd.new_attributes.get(self, []):
# We need to interpolate sub fields in this rdfvalue.
if self.field_names:
for x in result.GetFields(self.field_names):
yield x
else:
yield result
for result in fd.synced_attributes.get(self, []):
result = result.ToRDFValue()
# We need to interpolate sub fields in this rdfvalue.
if self.field_names:
for x in result.GetFields(self.field_names):
yield x
else:
yield result
if not result:
default = self.GetDefault(fd)
if default is not None:
yield default
def GetDefault(self, fd=None, default=None):
"""Returns a default attribute if it is not set."""
if callable(self.default):
return self.default(fd)
if self.default is not None:
return self(self.default)
if isinstance(default, rdfvalue.RDFValue):
default = default.Copy()
default.attribute_instance = self
return default
class SubjectAttribute(Attribute):
"""An attribute which virtualises the subject."""
def __init__(self):
Attribute.__init__(self, "aff4:subject",
rdfvalue.Subject, "A subject pseodo attribute",
"subject")
def GetValues(self, fd):
return [rdfvalue.Subject(fd.urn)]
class ClassProperty(property):
"""A property which comes from the class object."""
def __get__(self, _, owner):
return self.fget.__get__(None, owner)()
class ClassInstantiator(property):
"""A property which instantiates the class on getting."""
def __get__(self, _, owner):
return self.fget()
class LazyDecoder(object):
"""An object which delays serialize and unserialize as late as possible.
The current implementation requires the proxied object to be immutable.
"""
def __init__(self, rdfvalue_cls=None, serialized=None, age=None,
decoded=None):
self.rdfvalue_cls = rdfvalue_cls
self.serialized = serialized
self.age = age
self.decoded = decoded
def ToRDFValue(self):
if self.decoded is None:
self.decoded = self.rdfvalue_cls(initializer=self.serialized,
age=self.age)
return self.decoded
def FromRDFValue(self):
return self.serialized
class AFF4Object(object):
"""Base class for all objects."""
# We are a registered class.
__metaclass__ = registry.MetaclassRegistry
include_plugins_as_attributes = True
# This property is used in GUIs to define behaviours. These can take arbitrary
# values as needed. Behaviours are read only and set in the class definition.
_behaviours = frozenset()
# Should this object be synced back to the data store.
_dirty = False
# Is this object currently locked.
locked = False
@ClassProperty
@classmethod
def behaviours(cls): # pylint: disable=g-bad-name
return cls._behaviours
# We define the parts of the schema for each AFF4 Object as an internal
# class. As new objects extend this, they can add more attributes to their
# schema by extending their parents. Note that the class must be named
# SchemaCls.
class SchemaCls(object):
"""The standard AFF4 schema."""
# NOTE: we don't version the type in order not to accumulate its versions
# during blind write operations.
TYPE = Attribute("aff4:type", rdfvalue.RDFString,
"The name of the AFF4Object derived class.", "type")
SUBJECT = SubjectAttribute()
STORED = Attribute("aff4:stored", rdfvalue.RDFURN,
"The AFF4 container inwhich this object is stored.")
LAST = Attribute("metadata:last", rdfvalue.RDFDatetime,
"The last time any attribute of this object was written.",
creates_new_object_version=False)
LABEL = Attribute("aff4:labels", grr_rdf.LabelList,
"Any object can have labels applied to it.")
LEASED_UNTIL = Attribute("aff4:lease", rdfvalue.RDFDatetime,
"The time until which the object is leased by a "
"particular caller.", versioned=False,
creates_new_object_version=False)
def ListAttributes(self):
for attr in dir(self):
attr = getattr(self, attr)
if isinstance(attr, Attribute):
yield attr
def GetAttribute(self, name):
for i in self.ListAttributes():
# Attributes are accessible by predicate or name
if i.name == name or i.predicate == name:
return i
def __getattr__(self, attr):
"""For unknown attributes just return None.
Often the actual object returned is not the object that is expected. In
those cases attempting to retrieve a specific named attribute will raise,
e.g.:
fd = aff4.FACTORY.Open(urn)
fd.Get(fd.Schema.SOME_ATTRIBUTE, default_value)
This simply ensures that the default is chosen.
Args:
attr: Some ignored attribute.
"""
return None
# Make sure that when someone references the schema, they receive an instance
# of the class.
@property
def Schema(self): # pylint: disable=g-bad-name
return self.SchemaCls()
def __init__(self, urn, mode="r", parent=None, clone=None, token=None,
local_cache=None, age=NEWEST_TIME, follow_symlinks=True):
if urn is not None:
urn = rdfvalue.RDFURN(urn)
self.urn = urn
self.mode = mode
self.parent = parent
self.token = token
self.age_policy = age
self.follow_symlinks = follow_symlinks
self.lock = utils.PickleableLock()
# This flag will be set whenever an attribute is changed that has the
# creates_new_object_version flag set.
self._new_version = False
# Mark out attributes to delete when Flushing()
self._to_delete = set()
# We maintain two attribute caches - self.synced_attributes reflects the
# attributes which are synced with the data_store, while self.new_attributes
# are new attributes which still need to be flushed to the data_store. When
# this object is instantiated we populate self.synced_attributes with the
# data_store, while the finish method flushes new changes.
if clone is not None:
if isinstance(clone, dict):
# Just use these as the attributes, do not go to the data store. This is
# a quick way of creating an object with data which was already fetched.
self.new_attributes = {}
self.synced_attributes = clone
elif isinstance(clone, AFF4Object):
# We were given another object to clone - we do not need to access the
# data_store now.
self.new_attributes = clone.new_attributes.copy()
self.synced_attributes = clone.synced_attributes.copy()
else:
raise RuntimeError("Cannot clone from %s." % clone)
else:
self.new_attributes = {}
self.synced_attributes = {}
if "r" in mode:
if local_cache:
try:
for attribute, value, ts in local_cache[utils.SmartUnicode(urn)]:
self.DecodeValueFromAttribute(attribute, value, ts)
except KeyError:
pass
else:
# Populate the caches from the data store.
for urn, values in FACTORY.GetAttributes([urn], age=age,
token=self.token):
for attribute_name, value, ts in values:
self.DecodeValueFromAttribute(attribute_name, value, ts)
if clone is None:
self.Initialize()
def Initialize(self):
"""The method is called after construction to initialize the object.
This will be called after construction, and each time the object is
unserialized from the datastore.
An AFF4 object contains attributes which can be populated from the
database. This method is called to obtain a fully fledged object from
a collection of attributes.
"""
def DecodeValueFromAttribute(self, attribute_name, value, ts):
"""Given a serialized value, decode the attribute.
Only attributes which have been previously defined are permitted.
Args:
attribute_name: The string name of the attribute.
value: The serialized attribute value.
ts: The timestamp of this attribute.
"""
try:
# Get the Attribute object from our schema.
attribute = Attribute.PREDICATES[attribute_name]
cls = attribute.attribute_type
self._AddAttributeToCache(attribute, LazyDecoder(cls, value, ts),
self.synced_attributes)
except KeyError:
if not attribute_name.startswith("index:"):
logging.debug("Attribute %s not defined, skipping.", attribute_name)
except (ValueError, rdfvalue.DecodeError):
logging.debug("%s: %s invalid encoding. Skipping.",
self.urn, attribute_name)
def _AddAttributeToCache(self, attribute_name, value, cache):
"""Helper to add a new attribute to a cache."""
cache.setdefault(attribute_name, []).append(value)
def CheckLease(self):
if self.locked:
leased_until = self.Get(self.Schema.LEASED_UNTIL)
now = rdfvalue.RDFDatetime().Now()
if leased_until < now:
raise LockError("Lease for this object is expired "
"(leased until %s, now %s)!" % (leased_until, now))
def UpdateLease(self, duration):
"""Updates the lease and flushes the object.
The lease is set to expire after the "duration" time from the present
moment.
This method is supposed to be used when operation that requires locking
may run for a time that exceeds the lease time specified in OpenWithLock().
See flows/hunts locking for an example.
Args:
duration: Integer number of seconds. Lease expiry time will be set
to "time.time() + duration".
Raises:
LockError: if the object is not currently locked or the lease has
expired.
"""
if not self.locked:
raise LockError(
"Object must be locked to update the lease: %s." % self.urn)
# Check that current lease has not expired yet
self.CheckLease()
self.Set(
self.Schema.LEASED_UNTIL,
rdfvalue.RDFDatetime().FromSecondsFromEpoch(time.time() + duration))
self.Flush()
def Flush(self, sync=True):
"""Syncs this object with the data store, maintaining object validity."""
self.CheckLease()
self._WriteAttributes(sync=sync)
self._SyncAttributes()
if self.parent:
self.parent.Flush(sync=sync)
def Close(self, sync=True):
"""Close and destroy the object.
This is similar to Flush, but does not maintain object validity. Hence the
object should not be interacted with after Close().
Args:
sync: Write the attributes synchronously to the data store.
Raises:
LockError: The lease for this object has expired.
"""
self.CheckLease()
if self.locked:
self.Set(self.Schema.LEASED_UNTIL, rdfvalue.RDFDatetime(0))
self._WriteAttributes(sync=self.locked or sync)
if self.parent:
self.parent.Close(sync=sync)
# Interacting with a closed object is a bug. We need to catch this ASAP so
# we remove all mode permissions from this object.
self.mode = ""
@utils.Synchronized
def _WriteAttributes(self, sync=True):
"""Write the dirty attributes to the data store."""
# If the object is not opened for writing we do not need to flush it to the
# data_store.
if "w" not in self.mode:
return
if self.urn is None:
raise RuntimeError("Storing of anonymous AFF4 objects not supported.")
to_set = {}
for attribute_name, value_array in self.new_attributes.iteritems():
to_set_list = to_set.setdefault(attribute_name, [])
for value in value_array:
to_set_list.append((value.SerializeToDataStore(), value.age))
if self._dirty:
# We determine this object has a new version only if any of the versioned
# attributes have changed. Non-versioned attributes do not represent a new
# object version. The type of an object is versioned and represents a
# version point in the life of the object.
if self._new_version:
to_set[self.Schema.TYPE] = [
(rdfvalue.RDFString(self.__class__.__name__).SerializeToDataStore(),
rdfvalue.RDFDatetime().Now())]
# Write the attributes to the Factory cache.
FACTORY.SetAttributes(self.urn, to_set, self._to_delete, sync=sync,
token=self.token)
# Notify the factory that this object got updated.
FACTORY.NotifyWriteObject(self)
@utils.Synchronized
def _SyncAttributes(self):
"""Sync the new attributes to the synced attribute cache.
This maintains object validity.
"""
# This effectively moves all the values from the new_attributes to the
# _attributes caches.
for attribute, value_array in self.new_attributes.iteritems():
if not attribute.versioned:
value = value_array[0]
self.synced_attributes[attribute] = [LazyDecoder(decoded=value,
age=value.age)]
else:
synced_value_array = self.synced_attributes.setdefault(attribute, [])
for value in value_array:
synced_value_array.append(LazyDecoder(decoded=value, age=value.age))
synced_value_array.sort(key=lambda x: x.age, reverse=True)
self.new_attributes = {}
self._to_delete.clear()
self._dirty = False
self._new_version = False
def _CheckAttribute(self, attribute, value):
"""Check that the value is of the expected type.
Args:
attribute: An instance of Attribute().
value: An instance of RDFValue.
Raises:
ValueError: when the value is not of the expected type.
AttributeError: When the attribute is not of type Attribute().
"""
if not isinstance(attribute, Attribute):
raise AttributeError("Attribute %s must be of type aff4.Attribute()",
attribute)
if not isinstance(value, attribute.attribute_type):
raise ValueError("Value for attribute %s must be of type %s()",
attribute, attribute.attribute_type.__name__)
def Copy(self, to_attribute, from_fd, from_attribute):
values = from_fd.GetValuesForAttribute(from_attribute)
for v in values:
self.AddAttribute(to_attribute, v, age=v.age)
def Set(self, attribute, value=None):
"""Set an attribute on this object.
Set() is now a synonym for AddAttribute() since attributes are never
deleted.
Args:
attribute: The attribute to set.
value: The new value for this attribute.
"""
self.AddAttribute(attribute, value)
def AddAttribute(self, attribute, value=None, age=None):
"""Add an additional attribute to this object.
If value is None, attribute is expected to be already initialized with a
value. For example:
fd.AddAttribute(fd.Schema.CONTAINS("some data"))
Args:
attribute: The attribute name or an RDFValue derived from the attribute.
value: The value the attribute will be set to.
age: Age (timestamp) of the attribute. If None, current time is used.
Raises:
IOError: If this object is read only.
"""
if "w" not in self.mode:
raise IOError("Writing attribute %s to read only object." % attribute)
if value is None:
value = attribute
attribute = value.attribute_instance
# Check if this object should be locked in order to add the attribute.
# NOTE: We don't care about locking when doing blind writes.
if self.mode != "w" and attribute.lock_protected and not self.locked:
raise IOError("Object must be locked to write attribute %s." % attribute)
self._CheckAttribute(attribute, value)
# Does this represent a new version?
if attribute.versioned:
if attribute.creates_new_object_version:
self._new_version = True
# Update the time of this new attribute.
if age:
value.age = age
else:
value.age.Now()
# Non-versioned attributes always replace previous versions and get written
# at the earliest timestamp (so they appear in all objects).
else:
self._to_delete.add(attribute)
value.age = 0
self._AddAttributeToCache(attribute, value, self.new_attributes)
self._dirty = True
@utils.Synchronized
def DeleteAttribute(self, attribute):
"""Clears the attribute from this object."""
if "w" not in self.mode:
raise IOError("Deleting attribute %s from read only object." % attribute)
# Check if this object should be locked in order to delete the attribute.
# NOTE: We don't care about locking when doing blind writes.
if self.mode != "w" and attribute.lock_protected and not self.locked:
raise IOError("Object must be locked to delete attribute %s." % attribute)
if attribute in self.synced_attributes:
self._to_delete.add(attribute)
del self.synced_attributes[attribute]
if attribute in self.new_attributes:
del self.new_attributes[attribute]
# Does this represent a new version?
if attribute.versioned and attribute.creates_new_object_version:
self._new_version = True
self._dirty = True
def IsAttributeSet(self, attribute):
"""Determine if the attribute is set.
Args:
attribute: The attribute to check.
Returns:
True if set, otherwise False.
Checking Get against None doesn't work as Get will return a default
attribute value. This determines if the attribute has been manually set.
"""
return (attribute in self.synced_attributes or
attribute in self.new_attributes)
def Get(self, attribute, default=None):
"""Gets the attribute from this object."""
if attribute is None:
return default
# Allow the user to specify the attribute by name.
elif isinstance(attribute, str):
attribute = Attribute.GetAttributeByName(attribute)
# We can't read attributes from the data_store unless read mode was
# specified. It is ok to read new attributes though.
if "r" not in self.mode and (attribute not in self.new_attributes and
attribute not in self.synced_attributes):
raise IOError(
"Fetching %s from object not opened for reading." % attribute)
for result in self.GetValuesForAttribute(attribute, only_one=True):
try:
# The attribute may be a naked string or int - i.e. not an RDFValue at
# all.
result.attribute_instance = attribute
except AttributeError:
pass
return result
return attribute.GetDefault(self, default)
def GetValuesForAttribute(self, attribute, only_one=False):
"""Returns a list of values from this attribute."""
if not only_one and self.age_policy == NEWEST_TIME:
raise RuntimeError("Attempting to read all attribute versions for an "
"object opened for NEWEST_TIME. This is probably "
"not what you want.")
if attribute is None:
return []
elif isinstance(attribute, basestring):
attribute = Attribute.GetAttributeByName(attribute)
return attribute.GetValues(self)
def Update(self, attribute=None, user=None, priority=None):
"""Requests the object refresh an attribute from the Schema."""
def Upgrade(self, aff4_class):
"""Upgrades this object to the type specified.
AFF4 Objects can be upgraded on the fly to other type - As long as the new
type is derived from the current type. This feature allows creation of
placeholder objects which can later be upgraded to the fully featured
object.
Note: It is not allowed to downgrade an object if that would result in a
loss of information (since the new object has a smaller schema). This method
tries to store the new object with its new attributes and will fail if any
attributes can not be mapped.
Args:
aff4_class: A string representing the new class.
Returns:
an instance of the new class with all the same attributes as this current
object.
Raises:
AttributeError: When the new object can not accept some of the old
attributes.
InstanciationError: When we cannot instantiate the object type class.
"""
# We are already of the required type
if self.__class__.__name__ == aff4_class:
return self
# Instantiate the right type
cls = self.classes.get(str(aff4_class))
if cls is None:
raise InstanciationError("Could not instantiate %s" % aff4_class)
# It's not allowed to downgrade the object
if isinstance(self, cls):
# TODO(user): check what we should do here:
# 1) Nothing
# 2) raise
# 3) return self
# Option 3) seems ok, but we need to be sure that we don't use
# Create(mode='r') anywhere where code actually expects the object to be
# downgraded.
return self
# NOTE: It is possible for attributes to become inaccessible here if the old
# object has an attribute which the new object does not have in its
# schema. The values of these attributes will not be available any longer in
# the new object - usually because old attributes do not make sense in the
# context of the new object.
# Instantiate the class
result = cls(self.urn, mode=self.mode, clone=self, parent=self.parent,
token=self.token, age=self.age_policy,
follow_symlinks=self.follow_symlinks)
result.Initialize()
return result
def ForceNewVersion(self):
self._dirty = True
self._new_version = True
def __repr__(self):
return "<%s@%X = %s>" % (self.__class__.__name__, hash(self), self.urn)
# The following are used to ensure a bunch of AFF4Objects can be sorted on
# their URNs.
def __gt__(self, other):
return self.urn > other
def __lt__(self, other):
return self.urn < other
# This will register all classes into this modules's namespace regardless of
# where they are defined. This allows us to decouple the place of definition of
# a class (which might be in a plugin) from its use which will reference this
# module.
AFF4Object.classes = globals()
class AttributeExpression(lexer.Expression):
"""An expression which is used to filter attributes."""
def SetAttribute(self, attribute):
"""Checks that attribute is a valid Attribute() instance."""
# Grab the attribute registered for this name
self.attribute = attribute
self.attribute_obj = Attribute.GetAttributeByName(attribute)
if self.attribute_obj is None:
raise lexer.ParseError("Attribute %s not defined" % attribute)
def SetOperator(self, operator):
"""Sets the operator for this expression."""
self.operator = operator
# Find the appropriate list of operators for this attribute
attribute_type = self.attribute_obj.GetRDFValueType()
operators = attribute_type.operators
# Do we have such an operator?
self.number_of_args, self.operator_method = operators.get(
operator, (0, None))
if self.operator_method is None:
raise lexer.ParseError("Operator %s not defined on attribute '%s'" % (
operator, self.attribute))
self.operator_method = getattr(attribute_type, self.operator_method)
def Compile(self, filter_implemention):
"""Returns the data_store filter implementation from the attribute."""
return self.operator_method(self.attribute_obj,
filter_implemention, *self.args)
class AFF4QueryParser(lexer.SearchParser):
expression_cls = AttributeExpression
class AFF4Volume(AFF4Object):
"""Volumes contain other objects.
The AFF4 concept of a volume abstracts away how objects are stored. We simply
define an AFF4 volume as a container of other AFF4 objects. The volume may
implement any storage mechanism it likes, including virtualizing the objects
contained within it.
"""
_behaviours = frozenset(["Container"])
class SchemaCls(AFF4Object.SchemaCls):
CONTAINS = Attribute("aff4:contains", rdfvalue.RDFURN,
"An AFF4 object contained in this container.")
def Query(self, filter_string="", filter_obj=None, limit=1000,
age=NEWEST_TIME):
"""A way to query the collection based on a filter object.
Args:
filter_string: An optional filter applied to our members. The filter
string should correspond to the syntax described in lexer.py.
filter_obj: An optional compiled filter (as obtained from lexer.Compile().
limit: A limit on the number of returned rows.
age: The age of the objects to retrieve. Should be one of ALL_TIMES,
NEWEST_TIME or a range.
Returns:
A generator of all children which match the filter.
"""
# If no filtering is required we can just use OpenChildren.
if not filter_obj and not filter_string:
return self.OpenChildren(limit=limit, age=age)
if filter_obj is None and filter_string:
# Parse the query string
ast = AFF4QueryParser(filter_string).Parse()
# Query our own data store
filter_obj = ast.Compile(data_store.DB.filter)
result_set = data_store.DB.Query(
[], filter_obj, limit=limit, subject_prefix=self.urn, token=self.token)
result = data_store.ResultSet(
self.OpenChildren([m["subject"][0][0] for m in result_set],
limit=limit,
age=age))
result.total_count = result_set.total_count
return result
def OpenMember(self, path, mode="r"):
"""Opens the member which is contained in us.
Args:
path: A string relative to our own URN or an absolute urn.
mode: Mode for object.
Returns:
an AFF4Object instance.
Raises:
InstanciationError: If we are unable to open the member (e.g. it does not
already exist.)
"""
if isinstance(path, rdfvalue.RDFURN):
child_urn = path
else:
child_urn = self.urn.Add(path)
# Read the row from the table.
result = AFF4Object(child_urn, mode=mode, token=self.token)
# Get the correct type.
aff4_type = result.Get(result.Schema.TYPE)
if aff4_type:
# Try to get the container.
return result.Upgrade(aff4_type)
raise InstanciationError("Path %s not found" % path)
def ListChildren(self, limit=1000000, age=NEWEST_TIME):
"""Yields RDFURNs of all the children of this object.
Args:
limit: Total number of items we will attempt to retrieve.
age: The age of the items to retrieve. Should be one of ALL_TIMES,
NEWEST_TIME or a range.
Yields:
RDFURNs instances of each child.
"""
# Just grab all the children from the index.
index_prefix = "index:dir/"
for predicate, _, timestamp in data_store.DB.ResolveRegex(
self.urn, index_prefix + ".+", token=self.token,
timestamp=Factory.ParseAgeSpecification(age), limit=limit):
urn = self.urn.Add(predicate[len(index_prefix):])
urn.age = rdfvalue.RDFDatetime(timestamp)
yield urn
def OpenChildren(self, children=None, mode="r", limit=1000000,
chunk_limit=100000, age=NEWEST_TIME):
"""Yields AFF4 Objects of all our direct children.
This method efficiently returns all attributes for our children directly, in
a few data store round trips. We use the directory indexes to query the data
store.
Args:
children: A list of children RDFURNs to open. If None open all our
children.
mode: The mode the files should be opened with.
limit: Total number of items we will attempt to retrieve.
chunk_limit: Maximum number of items to retrieve at a time.
age: The age of the items to retrieve. Should be one of ALL_TIMES,
NEWEST_TIME or a range.
Yields:
Instances for each direct child.
"""
if children is None:
subjects = list(self.ListChildren(limit=limit, age=age))
else:
subjects = list(children)
subjects.sort()
# Read at most limit children at a time.
while subjects:
to_read = subjects[:chunk_limit]
subjects = subjects[chunk_limit:]
for child in FACTORY.MultiOpen(to_read, mode=mode, token=self.token,
age=age):
yield child
class AFF4Root(AFF4Volume):
"""The root of the VFS.
This virtual collection contains the entire virtual filesystem, and therefore
can be queried across the entire data store.
"""
def Query(self, filter_string="", filter_obj=None, subjects=None, limit=100):
"""Filter the objects contained within this collection."""
if filter_obj is None and filter_string:
# Parse the query string
ast = AFF4QueryParser(filter_string).Parse()
# Query our own data store
filter_obj = ast.Compile(data_store.DB.filter)
subjects = []
result_set = data_store.DB.Query([], filter_obj, subjects=subjects,
limit=limit, token=self.token)
for match in result_set:
subjects.append(match["subject"][0][0])
# Open them all at once.
result = data_store.ResultSet(FACTORY.MultiOpen(subjects, token=self.token))
result.total_count = result_set.total_count
return result
def OpenMember(self, path, mode="r"):
"""If we get to the root without a container, virtualize an empty one."""
urn = self.urn.Add(path)
result = AFF4Volume(urn, mode=mode, token=self.token)
result.Initialize()
return result
class AFF4Symlink(AFF4Object):
"""This is a symlink to another AFF4 object.
This means that opening this object will return the linked to object. To
create a symlink, one must open the symlink for writing and set the
Schema.SYMLINK_TARGET attribute.
Opening the object for reading will return the linked to object.
"""
class SchemaCls(AFF4Object.SchemaCls):
SYMLINK_TARGET = Attribute("aff4:symlink_target", rdfvalue.RDFURN,
"The target of this link.")
def __new__(cls, urn, mode="r", clone=None, token=None, local_cache=None,
age=NEWEST_TIME, parent=None, follow_symlinks=True):
# When first created, the symlink object is exposed.
if mode == "w" or not follow_symlinks:
return super(AFF4Symlink, cls).__new__(
cls, urn, mode=mode, clone=clone, token=token, parent=parent,
local_cache=local_cache, age=age)
elif clone is not None:
# Get the real object (note, clone shouldn't be None during normal
# object creation process):
target_urn = clone.Get(cls.SchemaCls.SYMLINK_TARGET)
return FACTORY.Open(target_urn, mode=mode, age=age, token=token)
else:
raise RuntimeError("Unable to open symlink.")
class AFF4OverlayedVolume(AFF4Volume):
"""A special kind of volume with overlayed contained objects.
This AFF4Volume can contain virtual overlays. An overlay is a path which
appears to be contained within our object, but is in fact the same object. For
example if self.urn = RDFURN('aff4:/C.123/foobar'):
Opening aff4:/C.123/foobar/overlayed/ will return a copy of aff4:/C.123/foobar
with the variable self.overlayed_path = "overlayed".
This is used to effectively allow a single AFF4Volume to handle overlay
virtual paths inside itself without resorting to storing anything in the
database for every one of these object. Thus we can have a WinRegistry
AFF4Volume that handles any paths within without having storage for each
registry key.
"""
overlayed_path = ""
def IsPathOverlayed(self, path): # pylint: disable=unused-argument
"""Should this path be overlayed.
Args:
path: A direct_child of ours.
Returns:
True if the path should be overlayed.
"""
return False
def OpenMember(self, path, mode="rw"):
if self.IsPathOverlayed(path):
result = self.__class__(self.urn, mode=mode, clone=self, parent=self)
result.overlayed_path = path
return result
return super(AFF4OverlayedVolume, self).OpenMember(path, mode)
def CreateMember(self, path, aff4_type, mode="w", clone=None):
if self.IsPathOverlayed(path):
result = self.__class__(self.urn, mode=mode, clone=self, parent=self)
result.overlayed_path = path
return result
return super(AFF4OverlayedVolume, self).CreateMember(
path, aff4_type, mode=mode, clone=clone)
class AFF4Stream(AFF4Object):
"""An abstract stream for reading data."""
__metaclass__ = abc.ABCMeta
# The read pointer offset.
offset = 0
# Updated when the object becomes dirty.
dirty = False
class SchemaCls(AFF4Object.SchemaCls):
# Note that a file on the remote system might have stat.st_size > 0 but if
# we do not have any of the data available to read: size = 0.
SIZE = Attribute("aff4:size", rdfvalue.RDFInteger,
"The total size of available data for this stream.",
"size", default=0)
@abc.abstractmethod
def Read(self, length):
pass
@abc.abstractmethod
def Write(self, data):
pass
@abc.abstractmethod
def Tell(self):
pass
@abc.abstractmethod
def Seek(self, offset, whence=0):
pass
# These are file object conformant namings for library functions that
# grr uses, and that expect to interact with 'real' file objects.
read = utils.Proxy("Read")
seek = utils.Proxy("Seek")
tell = utils.Proxy("Tell")
close = utils.Proxy("Close")
write = utils.Proxy("Write")
class AFF4MemoryStream(AFF4Stream):
"""A stream which keeps all data in memory."""
class SchemaCls(AFF4Stream.SchemaCls):
CONTENT = Attribute("aff4:content", rdfvalue.RDFBytes,
"Total content of this file.", default="")
def Initialize(self):
"""Try to load the data from the store."""
contents = ""
if "r" in self.mode:
contents = self.Get(self.Schema.CONTENT)
try:
if contents is not None:
contents = zlib.decompress(utils.SmartStr(contents))
except zlib.error:
pass
self.fd = StringIO.StringIO(contents)
self.size = rdfvalue.RDFInteger(len(contents))
self.offset = 0
def Truncate(self, offset=None):
if offset is None:
offset = self.offset
self.fd = StringIO.StringIO(self.fd.getvalue()[:offset])
self.size.Set(offset)
def Read(self, length):
return self.fd.read(int(length))
def Write(self, data):
if isinstance(data, unicode):
raise IOError("Cannot write unencoded string.")
self.dirty = True
self.fd.write(data)
self.size = self.fd.len
def Tell(self):
return self.fd.tell()
def Seek(self, offset, whence=0):
self.fd.seek(offset, whence)
def Flush(self, sync=True):
if self.dirty:
compressed_content = zlib.compress(self.fd.getvalue())
self.Set(self.Schema.CONTENT(compressed_content))
self.Set(self.Schema.SIZE(self.size))
super(AFF4MemoryStream, self).Flush(sync=sync)
def Close(self, sync=True):
if self.dirty:
compressed_content = zlib.compress(self.fd.getvalue())
self.Set(self.Schema.CONTENT(compressed_content))
self.Set(self.Schema.SIZE(self.size))
super(AFF4MemoryStream, self).Close(sync=sync)
class AFF4ObjectCache(utils.PickleableStore):
"""A cache which closes its objects when they expire."""
def KillObject(self, obj):
obj.Close()
class AFF4Image(AFF4Stream):
"""An AFF4 Image is stored in segments.
We are both an Image here and a volume (since we store the segments inside
us).
"""
NUM_RETRIES = 10
CHUNK_ID_TEMPLATE = "%010X"
# This is the chunk size of each chunk. The chunksize can not be changed once
# the object is created.
chunksize = 64 * 1024
class SchemaCls(AFF4Stream.SchemaCls):
_CHUNKSIZE = Attribute("aff4:chunksize", rdfvalue.RDFInteger,
"Total size of each chunk.", default=64*1024)
def Initialize(self):
"""Build a cache for our chunks."""
super(AFF4Image, self).Initialize()
self.offset = 0
# A cache for segments - When we get pickled we want to discard them.
self.chunk_cache = AFF4ObjectCache(100)
if "r" in self.mode:
self.size = int(self.Get(self.Schema.SIZE))
# pylint: disable=protected-access
self.chunksize = int(self.Get(self.Schema._CHUNKSIZE))
# pylint: enable=protected-access
else:
self.size = 0
def SetChunksize(self, chunksize):
# pylint: disable=protected-access
self.Set(self.Schema._CHUNKSIZE(chunksize))
# pylint: enable=protected-access
self.chunksize = int(chunksize)
self.Truncate(0)
def Seek(self, offset, whence=0):
# This stream does not support random writing in "w" mode. When the stream
# is opened in "w" mode we can not read from the data store and therefore we
# can not merge writes with existing data. It only makes sense to append to
# existing streams.
if self.mode == "w":
# Seeking to the end of the stream is ok.
if not (whence == 2 and offset == 0):
raise IOError("Can not seek with an AFF4Image opened for write only.")
if whence == 0:
self.offset = offset
elif whence == 1:
self.offset += offset
elif whence == 2:
self.offset = long(self.size) + offset
def Tell(self):
return self.offset
def Truncate(self, offset=0):
self._dirty = True
self.size = offset
self.offset = offset
self.chunk_cache.Flush()
def _GetChunkForWriting(self, chunk):
chunk_name = self.urn.Add(self.CHUNK_ID_TEMPLATE % chunk)
try:
fd = self.chunk_cache.Get(chunk_name)
except KeyError:
fd = FACTORY.Create(chunk_name, "AFF4MemoryStream", mode="rw",
token=self.token)
self.chunk_cache.Put(chunk_name, fd)
return fd
def _GetChunkForReading(self, chunk):
chunk_name = self.urn.Add(self.CHUNK_ID_TEMPLATE % chunk)
try:
fd = self.chunk_cache.Get(chunk_name)
except KeyError:
# The most common read access pattern is contiguous reading. Here we
# readahead to reduce round trips.
missing_chunks = []
for chunk_number in range(chunk, chunk + 10):
new_chunk_name = self.urn.Add(self.CHUNK_ID_TEMPLATE % chunk_number)
try:
self.chunk_cache.Get(new_chunk_name)
except KeyError:
missing_chunks.append(new_chunk_name)
for child in FACTORY.MultiOpen(
missing_chunks, mode="rw", token=self.token, age=self.age_policy):
if isinstance(child, AFF4Stream):
self.chunk_cache.Put(child.urn, child)
# This should work now - otherwise we just give up.
try:
fd = self.chunk_cache.Get(chunk_name)
except KeyError:
raise IOError("Cannot open chunk %s" % chunk_name)
return fd
def _ReadPartial(self, length):
"""Read as much as possible, but not more than length."""
chunk = self.offset / self.chunksize
chunk_offset = self.offset % self.chunksize
available_to_read = min(length, self.chunksize - chunk_offset)
retries = 0
while retries < self.NUM_RETRIES:
fd = self._GetChunkForReading(chunk)
if fd:
break
# Arriving here means we know about blobs that cannot be found in the db.
# The most likely reason is that they have not been synced yet so we
# retry a couple of times just in case they come in eventually.
logging.warning("Chunk not found.")
time.sleep(1)
retries += 1
if retries >= self.NUM_RETRIES:
raise IOError("Chunk not found for reading.")
fd.Seek(chunk_offset)
result = fd.Read(available_to_read)
self.offset += len(result)
return result
def Read(self, length):
"""Read a block of data from the file."""
result = ""
# The total available size in the file
length = int(length)
length = min(length, self.size - self.offset)
while length > 0:
data = self._ReadPartial(length)
if not data: break
length -= len(data)
result += data
return result
def _WritePartial(self, data):
chunk = self.offset / self.chunksize
chunk_offset = self.offset % self.chunksize
data = utils.SmartStr(data)
available_to_write = min(len(data), self.chunksize - chunk_offset)
fd = self._GetChunkForWriting(chunk)
fd.Seek(chunk_offset)
fd.Write(data[:available_to_write])
self.offset += available_to_write
return data[available_to_write:]
def Write(self, data):
self._dirty = True
if isinstance(data, unicode):
raise IOError("Cannot write unencoded string.")
while data:
data = self._WritePartial(data)
self.size = max(self.size, self.offset)
def Flush(self, sync=True):
"""Sync the chunk cache to storage."""
if self._dirty:
chunk_id = self.offset / self.chunksize
chunk_name = self.urn.Add(self.CHUNK_ID_TEMPLATE % chunk_id)
current_chunk = self.chunk_cache.Pop(chunk_name)
# Flushing the cache will call Close() on all the chunks. We hold on to
# the current chunk to ensure it does not get closed.
self.chunk_cache.Flush()
if current_chunk:
current_chunk.Flush(sync=sync)
self.chunk_cache.Put(chunk_name, current_chunk)
self.Set(self.Schema.SIZE(self.size))
super(AFF4Image, self).Flush(sync=sync)
def Close(self, sync=True):
"""This method is called to sync our data into storage.
Args:
sync: Should flushing be synchronous.
"""
self.Flush(sync=sync)
class AFF4NotificationRule(AFF4Object):
def OnWriteObject(self, unused_aff4_object):
raise NotImplementedError()
# Utility functions
class AFF4InitHook(registry.InitHook):
pre = ["DataStoreInit"]
def Run(self):
"""Delayed loading of aff4 plugins to break import cycles."""
# pylint: disable=unused-variable,global-statement,g-import-not-at-top
from grr.lib import aff4_objects
global FACTORY
FACTORY = Factory() # pylint: disable=g-bad-name
# pylint: enable=unused-variable,global-statement,g-import-not-at-top
class AFF4Filter(object):
"""A simple filtering system to be used with Query()."""
__metaclass__ = registry.MetaclassRegistry
# Automatically register plugins as class attributes
include_plugins_as_attributes = True
def __init__(self, *args):
self.args = args
@abc.abstractmethod
def Filter(self, subjects):
"""A generator which filters the subjects.
Args:
subjects: An iterator of aff4 objects.
Returns:
A generator over all the Objects which pass the filter.
"""
# A global registry of all AFF4 classes
FACTORY = None
ROOT_URN = rdfvalue.RDFURN("aff4:/")
def issubclass(obj, cls): # pylint: disable=redefined-builtin,g-bad-name
"""A sane implementation of issubclass.
See http://bugs.python.org/issue10569
Python bare issubclass must be protected by an isinstance test first since it
can only work on types and raises when provided something which is not a type.
Args:
obj: Any object or class.
cls: The class to check against.
Returns:
True if obj is a subclass of cls and False otherwise.
"""
return isinstance(obj, type) and __builtin__.issubclass(obj, cls)<|fim▁end|> | urn = rdfvalue.RDFURN(urn) |
<|file_name|>managers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import logging
import time
from collections import OrderedDict
from contextlib import contextmanager
import six
from django.utils.functional import cached_property
from .client import get_es_client
logger = logging.getLogger('elasticindex')
class ElasticQuerySet(object):
def __init__(self, model_cls, body=None, **kwargs):
self.model_cls = model_cls
self.body = body or {"query": {"match_all": {}}}
self.kwargs = kwargs or {}
self.latest_total_count = None
self.latest_raw_result = None
self.query_finished = False
def __len__(self):
return len(self.result_list)
def __iter__(self):
return iter(self.result_list)
def __bool__(self):
return bool(self.result_list)
def __getitem__(self, k):
"""
Retrieves an item or slice from the set of results.
"""
if not isinstance(k, (slice,) + six.integer_types):
raise TypeError
assert ((not isinstance(k, slice) and (k >= 0)) or
(isinstance(k, slice) and (k.start is None or k.start >= 0) and
(k.stop is None or k.stop >= 0))), \
"Negative indexing is not supported."
if self.query_finished:
return self.result_list[k]
if isinstance(k, slice):
qs = self
offset = 0
if k.start is not None:
offset = int(k.start)
qs = qs.offset(offset)
if k.stop is not None:
limit = int(k.stop) - offset
qs = qs.limit(limit)
return list(qs)[::k.step] if k.step else qs
qs = self.limit(1).offset(k)
return list(qs)[0]
def _clone(self):
"""
:rtype: ElasticQuerySet
"""
qs = self.__class__(
self.model_cls, copy.deepcopy(self.body),
**copy.deepcopy(self.kwargs))
return qs
@cached_property
def result_list(self):
self.query_finished = True
return list(self.get_result())
def get_result(self):
"""
elasticsearch の search をそのまま実行
:rtype: generator
"""
with self.log_query():
result = self.es_client.search(
index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE,
body=self.body, **self.kwargs)
self.latest_total_count = result['hits']['total']
self.latest_raw_result = result
for hit in result['hits']['hits']:
yield self.model_cls(hit)
@cached_property
def es_client(self):
"""
:rtype: Elasticsearch
"""
return get_es_client()
def get_by_id(self, id):
"""
Elasticsearch のIDで1件取得
:param id:
:return:
"""
result = self.es_client.get(
self.model_cls.INDEX, id, doc_type=self.model_cls.DOC_TYPE)
self.latest_raw_result = result
if not result['found']:
raise self.model_cls.DoesNotExist(id)
return self.model_cls(result)
def delete_by_id(self, id, **kwargs):
"""
Elasticsearch のIDで1件削除
:param id: elasticsearch document id
"""
result = self.es_client.delete(
self.model_cls.INDEX, self.model_cls.DOC_TYPE, id, **kwargs)
self.latest_raw_result = result
return result
def all(self):
"""
:rtype: ElasticQuerySet
"""
return self._clone()
def limit(self, limit):
"""
:rtype: ElasticQuerySet
"""
o = self._clone()
if limit is None:
if 'size' in o.body:
del o.body['size']
else:
o.body['size'] = limit
return o
def offset(self, offset):
"""
:rtype: ElasticQuerySet
"""
o = self._clone()
if offset is None:
if 'from' in o.body:
del o.body['from']
else:
o.body['from'] = offset
return o
def query(self, filter_query_dict):
"""
:param filter_query_dict:
- {"match": {"product_id": 192}}
- {"match_all": {}} # default
- {"multi_match": {
"query": query_word,
"fields": [
"upc", "title^3", "description", "authors",
"publishers", "tags", "keywords"]
}}
- {"bool": {
"must": [
{"match": {"is_used": True}},
{"range": {"stock": {"gt": 0}}}
]}}
:rtype: ElasticQuerySet
"""
o = self._clone()
o.body['query'] = filter_query_dict
return o
def set_body(self, body_dict):
"""
replace query body
"""
o = self._clone()
o.body = body_dict
return o
def get(self, filter_query_dict):
"""<|fim▁hole|> qs = self.query(filter_query_dict).limit(1)
if not qs:
raise self.model_cls.DoesNotExist(filter_query_dict)
return qs[0]
def count(self):
"""
件数取得
"""
if self.query_finished:
return len(self.result_list)
body = self.body.copy()
if 'sort' in body:
del body['sort']
with self.log_query(label='count', body=body):
result = self.es_client.count(
index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE,
body=body, **self.kwargs
)
self.latest_raw_result = result
return result['count']
def order_by(self, order_query_list):
"""
sort パラメータをつける
:type order_query_list: list, dict, string
- "mz_score"
- {"mz_score": "desc"}
"""
o = self._clone()
o.body['sort'] = order_query_list
return o
@property
def log_query(self):
"""
クエリをロギングするコンテクストマネージャ
elasticsearch や elasticsearch.trace のロガーを
DEBUG レベルで設定するともっと詳しく出る (結果が全部出る)
"""
@contextmanager
def _context(label='', body=None):
start_time = time.time()
yield
elapsed_time = time.time() - start_time
logger.debug('{}time:{}ms, body:{}'.format(
'{}: '.format(label) if label else '',
int(elapsed_time * 100), body or self.body))
return _context
def bulk(self, body):
return self.es_client.bulk(
body, index=self.model_cls.INDEX,
doc_type=self.model_cls.DOC_TYPE)
class ElasticDocumentManager(object):
"""
class ElasticDocumentManager(ElasticQuerySet)
でもいいんだけど、インスタンス変数が汚れる可能性があるので
クラスプロパティっぽい感じで、アクセスされるたびに新しいクエリセットを作ることにした
"""
def __init__(self, model_cls, body=None, **kwargs):
self.model_cls = model_cls
self.kwargs = kwargs
def __get__(self, cls, owner):
return ElasticQuerySet(self.model_cls)
class ElasticIndexManager(object):
def __init__(self, model_cls):
self.model_cls = model_cls
@cached_property
def mappings_properties(self):
return OrderedDict(
[
(f_name, f.mapping)
for f_name, f
in self.model_cls._cached_fields().items()
])
@cached_property
def mappings(self):
"""
インデックスの mappings の指定にそのまま使える dict
"""
return {
self.model_cls.DOC_TYPE: {
"properties": self.mappings_properties
}
}
def delete(self):
"""
インデックスを削除
:return:
"""
es = get_es_client()
es.indices.delete(self.model_cls.INDEX, ignore=[404, ])
@cached_property
def create_body_params(self):
body = {"mappings": self.mappings}
index_setting = getattr(self.model_cls, 'INDEX_SETTINGS', None)
if index_setting:
body["settings"] = index_setting
return body
def create(self):
"""
インデックスを作成
:return:
"""
es = get_es_client()
es.indices.create(
self.model_cls.INDEX, self.create_body_params)
def exists(self):
"""
インデックスが存在するか
"""
es = get_es_client()
return es.indices.exists(self.model_cls.INDEX)
class ElasticDocumentMeta(type):
def __new__(mcs, name, bases, attrs):
c = super(ElasticDocumentMeta, mcs).__new__(
mcs, name, bases, attrs)
c.objects = ElasticDocumentManager(c)
c.index = ElasticIndexManager(c)
return c<|fim▁end|> | 1件取得
複数件あってもエラーは出さず、黙って1件だけ返す
""" |
<|file_name|>rhuimanager_sync.py<|end_file_name|><|fim▁begin|>""" RHUIManager Sync functions """
import re
from stitches.expect import Expect
from rhuilib.rhuimanager import RHUIManager
from rhuilib.util import Util
class RHUIManagerSync(object):
'''
Represents -= Synchronization Status =- RHUI screen
'''
@staticmethod
def sync_cds(connection, cdslist):
'''
sync an individual CDS immediately
'''
RHUIManager.screen(connection, "sync")
Expect.enter(connection, "sc")
RHUIManager.select(connection, cdslist)
RHUIManager.proceed_with_check(connection, "The following CDS instances will be scheduled for synchronization:", cdslist)
RHUIManager.quit(connection)
@staticmethod
def sync_cluster(connection, clusterlist):
'''
sync a CDS cluster immediately
'''
RHUIManager.screen(connection, "sync")
Expect.enter(connection, "sl")
RHUIManager.select(connection, clusterlist)
RHUIManager.proceed_with_check(connection, "The following CDS clusters will be scheduled for synchronization:", clusterlist)
RHUIManager.quit(connection)
@staticmethod
def get_cds_status(connection, cdsname):
'''
display CDS sync summary
'''
RHUIManager.screen(connection, "sync")
Expect.enter(connection, "dc")
res_list = Expect.match(connection, re.compile(".*\n" + cdsname.replace(".", "\.") + "[\.\s]*\[([^\n]*)\].*" + cdsname.replace(".", "\.") + "\s*\r\n([^\n]*)\r\n", re.DOTALL), [1, 2], 60)
connection.cli.exec_command("killall -s SIGINT rhui-manager")
ret_list = []
for val in [res_list[0]] + res_list[1].split(" "):
val = Util.uncolorify(val.strip())
ret_list.append(val)
RHUIManager.quit(connection)
return ret_list
@staticmethod
def sync_repo(connection, repolist):
'''
sync an individual repository immediately
'''
RHUIManager.screen(connection, "sync")
Expect.enter(connection, "sr")
Expect.expect(connection, "Select one or more repositories.*for more commands:", 60)
Expect.enter(connection, "l")
RHUIManager.select(connection, repolist)
RHUIManager.proceed_with_check(connection, "The following repositories will be scheduled for synchronization:", repolist)
RHUIManager.quit(connection)
@staticmethod<|fim▁hole|> '''
display repo sync summary
'''
RHUIManager.screen(connection, "sync")
Expect.enter(connection, "dr")
reponame_quoted = reponame.replace(".", "\.")
res = Expect.match(connection, re.compile(".*" + reponame_quoted + "\s*\r\n([^\n]*)\r\n.*", re.DOTALL), [1], 60)[0]
connection.cli.exec_command("killall -s SIGINT rhui-manager")
res = Util.uncolorify(res)
ret_list = res.split(" ")
for i in range(len(ret_list)):
ret_list[i] = ret_list[i].strip()
RHUIManager.quit(connection)
return ret_list<|fim▁end|> | def get_repo_status(connection, reponame): |
<|file_name|>validapass.py<|end_file_name|><|fim▁begin|>#! /usr/bin/python3
import re
err = "La contraseña no es segura"
msg = "Escriba una contraseña al menos 8 caracteres alfanumericos"
def ismayor8(a):
"""
Compara si es mayor a 8 caracteres
"""
if (len(a) < 8):
return False<|fim▁hole|>
def minus(a):
"""
compara si existe alguna letra minuscula
"""
patron = ('[a-z]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def mayus(a):
"""
Compara si existe alguna letra mayuscula
"""
patron = ('[A-Z]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def unnum(a):
"""
Compara si existe algun número
"""
patron = ('[0-9]')
flag = False
for letra in a:
if (re.match(patron, letra)):
flag = True
return flag
def alfanumeric(a):
"""
Compara si la cadena es alfanumerica
"""
if (a.isalnum()):
return True
else:
return False
def vpass():
"""
Validamos contraseña
"""
salida = False
while salida is False:
try:
print (msg, end='\n')
paswd = str(input('passwd: '))
if (ismayor8(paswd)):
if (alfanumeric(paswd)):
if (minus(paswd) and mayus(paswd) and unnum(paswd)):
salida = True
else:
print (err, end='\n')
else:
print (err, end='\n')
except (KeyboardInterrupt, EOFError):
print (msg, end='\n')
return salida<|fim▁end|> | return True
|
<|file_name|>investopedia_generator.py<|end_file_name|><|fim▁begin|>#Parsing program to sort through Investopedia
import urllib2
import re
#This is the code to parse the List of Terms
def get_glossary(res_num):
html_lowered = res_num.lower();
begin = html_lowered.find('<!-- .alphabet -->')
end = html_lowered.find('<!-- .idx-1 -->')
if begin == -1 or end == -1:
return None
else:
return res_num[begin+len('<!-- .alphabet -->'):end].strip()
#This is the code to parse the Title
def get_title(res_num):
html_lowered = res_num.lower();
begin = html_lowered.find('<title>')
end = html_lowered.find('</title>')
if begin == -1 or end == -1:
return None
else:
return res_num[begin+len('<title>'):end].strip()
#We start with the numbers section of Investopedia
url = "http://www.investopedia.com/terms/1/"
res_num=""
for line in urllib2.urlopen(url):
res_num+=line
title_num = get_title(res_num)
glossary_num = get_glossary(res_num)
##Find all hyperlinks in list then eliminate duplicates
glossary_parsed_num = re.findall(r'href=[\'"]?([^\'" >]+)', glossary_num)
glossary_parsed_num = list(set(glossary_parsed_num))
parent_url = 'http://www.investopedia.com'
tail = ' Definition | Investopedia'
short_tail = ' | Investopedia'
print title_num
gp_list = []
for x in glossary_parsed_num:
gpn = parent_url + x
res_num=""
for line in urllib2.urlopen(gpn):
res_num+=line
gpn_title = get_title(res_num)
gpn_penult = gpn_title.replace(tail,'')
gpn_final = gpn_penult.replace(short_tail,'')
gp_list.append(gpn_final)
#The alphabet section of Investopedia terms begins here
alfa = [chr(i) for i in xrange(ord('a'), ord('z')+1)]
for i, v in enumerate(alfa):
u = 'http://www.investopedia.com/terms/'
w = '/'
invest_alfa_url = u + v + w
# get url info
res_alfa=""
for line in urllib2.urlopen(invest_alfa_url):
res_alfa+=line
glossary_alfa = get_glossary(res_alfa)
title_alfa = get_title(res_alfa)
glossary_parsed_alfa = re.findall(r'href=[\'"]?([^\'" >]+)', glossary_alfa)
glossary_parsed_alfa = list(set(glossary_parsed_alfa))
print title_alfa
for x in glossary_parsed_alfa:
gpa = parent_url + x
res_num=""
for line in urllib2.urlopen(gpa):
res_num+=line
gpa_title = get_title(res_num)
gpa_penult = gpa_title.replace(tail,'')
gpa_final = gpa_penult.replace(short_tail,'')<|fim▁hole|>with open('dict.dat','w') as f:
for item in gp_list:
f.write('%s\n' % item)
#Read back file to check the stock was added correctly
with open('dict.dat') as f:
gp_list = f.readlines()
gp_list = map(lambda s: s.strip(), gp_list)
gp_list = list(set(gp_list))
print gp_list
print ''<|fim▁end|> | gp_list.append(gpa_final)
#Write the new list to the file |
<|file_name|>camera.js<|end_file_name|><|fim▁begin|>"use strict";
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
return c > 3 && r && Object.defineProperty(target, key, r), r;
};
var plugin_1 = require('./plugin');
/**
* @name Camera
* @description<|fim▁hole|> * @usage
* ```typescript
* import { Camera } from 'ionic-native';
*
*
* Camera.getPicture(options).then((imageData) => {
* // imageData is either a base64 encoded string or a file URI
* // If it's base64:
* let base64Image = 'data:image/jpeg;base64,' + imageData;
* }, (err) => {
* // Handle error
* });
* ```
* @interfaces
* CameraOptions
* CameraPopoverOptions
*/
var Camera = (function () {
function Camera() {
}
/**
* Take a picture or video, or load one from the library.
* @param {CameraOptions?} options optional. Options that you want to pass to the camera. Encoding type, quality, etc. Platform-specific quirks are described in the [Cordova plugin docs](https://github.com/apache/cordova-plugin-camera#cameraoptions-errata-).
* @returns {Promise<any>} Returns a Promise that resolves with Base64 encoding of the image data, or the image file URI, depending on cameraOptions, otherwise rejects with an error.
*/
Camera.getPicture = function (options) { return; };
/**
* Remove intermediate image files that are kept in temporary storage after calling camera.getPicture.
* Applies only when the value of Camera.sourceType equals Camera.PictureSourceType.CAMERA and the Camera.destinationType equals Camera.DestinationType.FILE_URI.
* @returns {Promise<any>}
*/
Camera.cleanup = function () { return; };
;
/**
* @private
* @enum {number}
*/
Camera.DestinationType = {
/** Return base64 encoded string. DATA_URL can be very memory intensive and cause app crashes or out of memory errors. Use FILE_URI or NATIVE_URI if possible */
DATA_URL: 0,
/** Return file uri (content://media/external/images/media/2 for Android) */
FILE_URI: 1,
/** Return native uri (eg. asset-library://... for iOS) */
NATIVE_URI: 2
};
/**
* @private
* @enum {number}
*/
Camera.EncodingType = {
/** Return JPEG encoded image */
JPEG: 0,
/** Return PNG encoded image */
PNG: 1
};
/**
* @private
* @enum {number}
*/
Camera.MediaType = {
/** Allow selection of still pictures only. DEFAULT. Will return format specified via DestinationType */
PICTURE: 0,
/** Allow selection of video only, ONLY RETURNS URL */
VIDEO: 1,
/** Allow selection from all media types */
ALLMEDIA: 2
};
/**
* @private
* @enum {number}
*/
Camera.PictureSourceType = {
/** Choose image from picture library (same as SAVEDPHOTOALBUM for Android) */
PHOTOLIBRARY: 0,
/** Take picture from camera */
CAMERA: 1,
/** Choose image from picture library (same as PHOTOLIBRARY for Android) */
SAVEDPHOTOALBUM: 2
};
/**
* @private
* Matches iOS UIPopoverArrowDirection constants to specify arrow location on popover.
* @enum {number}
*/
Camera.PopoverArrowDirection = {
ARROW_UP: 1,
ARROW_DOWN: 2,
ARROW_LEFT: 4,
ARROW_RIGHT: 8,
ARROW_ANY: 15
};
/**
* @private
* @enum {number}
*/
Camera.Direction = {
/** Use the back-facing camera */
BACK: 0,
/** Use the front-facing camera */
FRONT: 1
};
__decorate([
plugin_1.Cordova({
callbackOrder: 'reverse'
})
], Camera, "getPicture", null);
__decorate([
plugin_1.Cordova({
platforms: ['iOS']
})
], Camera, "cleanup", null);
Camera = __decorate([
plugin_1.Plugin({
pluginName: 'Camera',
plugin: 'cordova-plugin-camera',
pluginRef: 'navigator.camera',
repo: 'https://github.com/apache/cordova-plugin-camera',
platforms: ['Android', 'BlackBerry', 'Browser', 'Firefox', 'FireOS', 'iOS', 'Windows', 'Windows Phone 8', 'Ubuntu']
})
], Camera);
return Camera;
}());
exports.Camera = Camera;
//# sourceMappingURL=camera.js.map<|fim▁end|> | * Take a photo or capture video.
*
* Requires {@link module:driftyco/ionic-native} and the Cordova plugin: `cordova-plugin-camera`. For more info, please see the [Cordova Camera Plugin Docs](https://github.com/apache/cordova-plugin-camera).
* |
<|file_name|>nxos_vtp_password.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_vtp_password
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages VTP password configuration.
description:
- Manages VTP password configuration.
author:
- Gabriele Gerbino (@GGabriele)
notes:
- VTP feature must be active on the device to use this module.
- This module is used to manage only VTP passwords.
- Use this in combination with M(nxos_vtp_domain) and M(nxos_vtp_version)
to fully manage VTP operations.
- You can set/remove password only if a VTP domain already exist.
- If C(state=absent) and no C(vtp_password) is provided, it remove the current
VTP password.
- If C(state=absent) and C(vtp_password) is provided, the proposed C(vtp_password)
has to match the existing one in order to remove it.
options:
vtp_password:
description:
- VTP password
required: false
default: null
state:
description:
- Manage the state of the resource
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ENSURE VTP PASSWORD IS SET
- nxos_vtp_password:
password: ntc
state: present
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
# ENSURE VTP PASSWORD IS REMOVED
- nxos_vtp_password:
password: ntc
state: absent
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"vtp_password": "new_ntc"}
existing:
description:
- k/v pairs of existing vtp
returned: always
type: dict
sample: {"domain": "ntc", "version": "1", "vtp_password": "ntc"}
end_state:
description: k/v pairs of vtp after module execution
returned: always
type: dict
sample: {"domain": "ntc", "version": "1", "vtp_password": "new_ntc"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["vtp password new_ntc"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
from ansible.module_utils.nxos import get_config, load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
import re
def execute_show_command(command, module, command_type='cli_show'):
if module.params['transport'] == 'cli':
if 'show run' not in command:
command += ' | json'
cmds = [command]
body = run_commands(module, cmds)<|fim▁hole|> body = run_commands(module, cmds)
return body
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def apply_key_map(key_map, table):
new_dict = {}
for key, value in table.items():
new_key = key_map.get(key)
if new_key:
value = table.get(key)
if value:
new_dict[new_key] = str(value)
else:
new_dict[new_key] = value
return new_dict
def get_vtp_config(module):
command = 'show vtp status'
body = execute_show_command(
command, module, command_type='cli_show_ascii')[0]
vtp_parsed = {}
if body:
version_regex = '.*VTP version running\s+:\s+(?P<version>\d).*'
domain_regex = '.*VTP Domain Name\s+:\s+(?P<domain>\S+).*'
try:
match_version = re.match(version_regex, body, re.DOTALL)
version = match_version.groupdict()['version']
except AttributeError:
version = ''
try:
match_domain = re.match(domain_regex, body, re.DOTALL)
domain = match_domain.groupdict()['domain']
except AttributeError:
domain = ''
if domain and version:
vtp_parsed['domain'] = domain
vtp_parsed['version'] = version
vtp_parsed['vtp_password'] = get_vtp_password(module)
return vtp_parsed
def get_vtp_password(module):
command = 'show vtp password'
body = execute_show_command(command, module)[0]
password = body['passwd']
if password:
return str(password)
else:
return ""
def main():
argument_spec = dict(
vtp_password=dict(type='str', no_log=True),
state=dict(choices=['absent', 'present'],
default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
warnings = list()
check_args(module, warnings)
vtp_password = module.params['vtp_password'] or None
state = module.params['state']
existing = get_vtp_config(module)
end_state = existing
args = dict(vtp_password=vtp_password)
changed = False
proposed = dict((k, v) for k, v in args.items() if v is not None)
delta = dict(set(proposed.items()).difference(existing.items()))
commands = []
if state == 'absent':
if vtp_password is not None:
if existing['vtp_password'] == proposed['vtp_password']:
commands.append(['no vtp password'])
else:
module.fail_json(msg="Proposed vtp password doesn't match "
"current vtp password. It cannot be "
"removed when state=absent. If you are "
"trying to change the vtp password, use "
"state=present.")
else:
if not existing.get('domain'):
module.fail_json(msg='Cannot remove a vtp password '
'before vtp domain is set.')
elif existing['vtp_password'] != ('\\'):
commands.append(['no vtp password'])
elif state == 'present':
if delta:
if not existing.get('domain'):
module.fail_json(msg='Cannot set vtp password '
'before vtp domain is set.')
else:
commands.append(['vtp password {0}'.format(vtp_password)])
cmds = flatten_list(commands)
if cmds:
if module.check_mode:
module.exit_json(changed=True, commands=cmds)
else:
changed = True
load_config(module, cmds)
end_state = get_vtp_config(module)
if 'configure' in cmds:
cmds.pop(0)
results = {}
results['proposed'] = proposed
results['existing'] = existing
results['end_state'] = end_state
results['updates'] = cmds
results['changed'] = changed
results['warnings'] = warnings
module.exit_json(**results)
if __name__ == '__main__':
main()<|fim▁end|> | elif module.params['transport'] == 'nxapi':
cmds = [command] |
<|file_name|>employee-detail.component.ts<|end_file_name|><|fim▁begin|>///<reference path="../../node_modules/angular2/typings/browser.d.ts"/>
import { Component, OnInit } from 'angular2/core';
import { RouteParams, Router, ROUTER_DIRECTIVES } from 'angular2/router';
import { EmployeeEditFormComponent } from './employee-edit-form.component';
import { EmployeeDetailServiceComponent } from '../services/employee-detail-service.component';
import { EmployeeDeleteServiceComponent } from '../services/employee-delete-service.component';
@Component({
selector: 'employee-detail',
templateUrl: 'src/pages/employee-detail.component.html',
providers: [
EmployeeDetailServiceComponent,
EmployeeDeleteServiceComponent
],
directives: [ ROUTER_DIRECTIVES, EmployeeEditFormComponent ]
})
export class EmployeeDetailComponent implements OnInit {
public currentEmployee;
public errorMessage: string;
constructor(
private _router: Router,
private _routeParams: RouteParams,
private _detailService: EmployeeDetailServiceComponent,
private _deleteService: EmployeeDeleteServiceComponent<|fim▁hole|>
ngOnInit() {
let id = parseInt(this._routeParams.get('id'));
this._detailService.getEmployee(id).subscribe(
employee => this.currentEmployee = employee,
error => this.errorMessage = <any>error
);
}
deleteHandler(id: number) {
this._deleteService.deleteEmployee(id).subscribe(
employee => this.currentEmployee = employee,
errorMessage => this.errorMessage = errorMessage,
() => this._router.navigate(['EmployeeList'])
)
}
}<|fim▁end|> | ){} |
<|file_name|>editortool.py<|end_file_name|><|fim▁begin|>from OpenGL import GL
import numpy
from depths import DepthOffset
from pymclevel import BoundingBox
from config import config
from albow.translate import _
class EditorTool(object):
surfaceBuild = False
panel = None
optionsPanel = None
toolIconName = None
worldTooltipText = None
previewRenderer = None
tooltipText = "???"
def levelChanged(self):
""" called after a level change """
pass
<|fim▁hole|> def statusText(self):
return ""
@property
def cameraDistance(self):
return self.editor.cameraToolDistance
def toolEnabled(self):
return True
def __init__(self, editor):
self.editor = editor
self.__hotkey = None
@property
def hotkey(self):
return _(self.__hotkey)
@hotkey.setter
def hotkey(self, k):
self.__hotkey = k
def toolReselected(self):
pass
def toolSelected(self):
pass
def drawTerrainReticle(self):
pass
def drawTerrainMarkers(self):
pass
def drawTerrainPreview(self, origin):
if self.previewRenderer is None:
return
self.previewRenderer.origin = map(lambda a, b: a - b, origin, self.level.bounds.origin)
GL.glPolygonOffset(DepthOffset.ClonePreview, DepthOffset.ClonePreview)
GL.glEnable(GL.GL_POLYGON_OFFSET_FILL)
self.previewRenderer.draw()
GL.glDisable(GL.GL_POLYGON_OFFSET_FILL)
def rotate(self, amount=1, blocksOnly=False):
pass
def roll(self, amount=1, blocksOnly=False):
pass
def flip(self, amount=1, blocksOnly=False):
pass
def mirror(self, amount=1, blocksOnly=False):
pass
def swap(self, amount=1):
pass
def mouseDown(self, evt, pos, direction):
'''pos is the coordinates of the block under the cursor,
direction indicates which face is under it. the tool performs
its action on the specified block'''
pass
def mouseUp(self, evt, pos, direction):
pass
def mouseDrag(self, evt, pos, direction):
pass
def keyDown(self, evt):
pass
def keyUp(self, evt):
pass
def increaseToolReach(self):
"Return True if the tool handles its own reach"
return False
def decreaseToolReach(self):
"Return True if the tool handles its own reach"
return False
def resetToolReach(self):
"Return True if the tool handles its own reach"
return False
def confirm(self):
''' called when user presses enter '''
pass
def cancel(self):
'''cancel the current operation. called when a different tool
is picked, escape is pressed, or etc etc'''
self.hidePanel()
# pass
def findBestTrackingPlane(self, face):
cv = list(self.editor.mainViewport.cameraVector)
cv[face >> 1] = 0
cv = map(abs, cv)
return cv.index(max(cv))
def drawToolReticle(self):
'''get self.editor.blockFaceUnderCursor for pos and direction.
pos is the coordinates of the block under the cursor,
direction indicates which face is under it. draw something to
let the user know where the tool is going to act. e.g. a
transparent block for the block placing tool.'''
pass
def drawToolMarkers(self):
''' draw any markers the tool wants to leave in the field
while another tool is out. e.g. the current selection for
SelectionTool'''
pass
def selectionChanged(self):
""" called when the selection changes due to nudge. other tools can be active. """
pass
edge_factor = 0.1
def boxFaceUnderCursor(self, box):
if self.editor.mainViewport.mouseMovesCamera:
return None, None
p0 = self.editor.mainViewport.cameraPosition
normal = self.editor.mainViewport.mouseVector
if normal is None:
return None, None
points = {}
# glPointSize(5.0)
# glColor(1.0, 1.0, 0.0, 1.0)
# glBegin(GL_POINTS)
for dim in range(3):
dim1 = dim + 1
dim2 = dim + 2
dim1 %= 3
dim2 %= 3
def pointInBounds(point, x):
return box.origin[x] <= point[x] <= box.maximum[x]
neg = normal[dim] < 0
for side in 0, 1:
d = (box.maximum, box.origin)[side][dim] - p0[dim]
if d >= 0 or (neg and d <= 0):
if normal[dim]:
scale = d / normal[dim]
point = map(lambda a, p: (a * scale + p), normal, p0)
# glVertex3f(*point)
if pointInBounds(point, dim1) and pointInBounds(point, dim2):
points[dim * 2 + side] = point
# glEnd()
if not len(points):
return None, None
cp = self.editor.mainViewport.cameraPosition
distances = dict(
(numpy.sum(map(lambda a, b: (b - a) ** 2, cp, point)), (face, point)) for face, point in points.iteritems())
if not len(distances):
return None, None
# When holding alt, pick the face opposite the camera
# if key.get_mods() & KMOD_ALT:
# minmax = max
# else:
face, point = distances[min(distances.iterkeys())]
# if the point is near the edge of the face, and the edge is facing away,
# return the away-facing face
dim = face // 2
dim1, dim2 = dim + 1, dim + 2
dim1, dim2 = dim1 % 3, dim2 % 3
cv = self.editor.mainViewport.cameraVector
# determine if a click was within self.edge_factor of the edge of a selection box side. if so, click through
# to the opposite side
for d in dim1, dim2:
edge_width = box.size[d] * self.edge_factor
facenormal = [0, 0, 0]
cameraBehind = False
if point[d] - box.origin[d] < edge_width:
facenormal[d] = -1
cameraBehind = cp[d] - box.origin[d] > 0
if point[d] - box.maximum[d] > -edge_width:
facenormal[d] = 1
cameraBehind = cp[d] - box.maximum[d] < 0
if numpy.dot(facenormal, cv) > 0 or cameraBehind:
# the face adjacent to the clicked edge faces away from the cam
return distances[max(distances.iterkeys())]
return face, point
def selectionCorners(self):
""" returns the positions of the two selection corners as a pair of 3-tuples, each ordered x,y,z """
if (None != self.editor.selectionTool.bottomLeftPoint and
None != self.editor.selectionTool.topRightPoint):
return (self.editor.selectionTool.bottomLeftPoint,
self.editor.selectionTool.topRightPoint)
return None
def selectionBoxForCorners(self, p1, p2):
''' considers p1,p2 as the marked corners of a selection.
returns a BoundingBox containing all the blocks within.'''
if self.editor.level is None:
return None
p1, p2 = list(p1), list(p2)
# d = [(a-b) for a,b in zip(p1,p2)]
for i in range(3):
if p1[i] > p2[i]:
t = p2[i]
p2[i] = p1[i]
p1[i] = t
p2[i] += 1
size = map(lambda a, b: a - b, p2, p1)
if p1[1] < 0:
size[1] += p1[1]
p1[1] = 0
h = self.editor.level.Height
if p1[1] >= h:
p1[1] = h - 1
size[1] = 1
if p1[1] + size[1] >= h:
size[1] = h - p1[1]
return BoundingBox(p1, size)
def selectionBox(self):
''' selection corners, ordered, with the greater point moved up one block for use as the ending value of an array slice '''
c = self.selectionCorners()
if c:
return self.selectionBoxForCorners(*c)
return None
def selectionSize(self):
''' returns a tuple containing the size of the selection (x,y,z)'''
c = self.selectionBox()
if c is None:
return None
return c.size
@property
def maxBlocks(self):
return config.settings.blockBuffer.get() / 2 # assume block buffer in bytes
def showPanel(self):
pass
def hidePanel(self):
if self.panel and self.panel.parent:
self.panel.parent.remove(self.panel)
self.panel = None<|fim▁end|> | @property |
<|file_name|>example.py<|end_file_name|><|fim▁begin|># Example taken from the http://gunicorn.org/configure.html
# page.
import os
def numCPUs():
if not hasattr(os, "sysconf"):
raise RuntimeError("No sysconf detected.")
return os.sysconf("SC_NPROCESSORS_ONLN")
bind = "127.0.0.1:8000"<|fim▁hole|><|fim▁end|> | workers = numCPUs() * 2 + 1 |
<|file_name|>Register.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
@author: Tobias
"""
"""@brief List of register classes"""
_registerClasses = [
['al', 'ah', 'ax', 'eax', 'rax'],
['bl', 'bh', 'bx', 'ebx', 'rbx'],
['cl', 'ch', 'cx', 'ecx', 'rcx'],
['dl', 'dh', 'dx', 'edx', 'rdx'],
['bpl', 'bp', 'ebp', 'rbp'],
['dil', 'di', 'edi', 'rdi'],
['sil', 'si', 'esi', 'rsi'],
['spl', 'sp', 'esp', 'rsp'],
['r8l', 'r8w', 'r8d', 'r8'],
['r9l', 'r9w', 'r9d', 'r9'],
['r10l', 'r10w', 'r10d', 'r10'],
['r11l', 'r11w', 'r11d', 'r11'],
['r12l', 'r12w', 'r12d', 'r12'],
['r13l', 'r13w', 'r13d', 'r13'],
['r14l', 'r14w', 'r14d', 'r14'],
['r15l', 'r15w', 'r15d', 'r15']
]
def get_reg_class(reg):
"""
@brief Determines the register class of a given reg.
All different register names that address the same register
belong to the same register class e.g.: 'ax' and 'eax'
@param reg name of register
@return register class
"""
lreg = reg.lower()
ret_value = None
for pos, reg_list in enumerate(_registerClasses):
for reg in reg_list:
found = False
if reg == lreg:
found = True
ret_value = pos
break
if found:
break
return ret_value
def get_reg_by_size(reg_class, reg_size):
"""
@brief Determines the register by its size and class
@param reg_class The register class of the register
@param reg_size The size of the register
@return Name of the register
"""
if reg_class >= len(_registerClasses):
return None
num_regs = len(_registerClasses[reg_class])
if num_regs < 4:
return None
reg_index = -1
if reg_size > 32: # 64-bit regs
reg_index = num_regs - 1
elif reg_size > 16: # 32-bit regs
reg_index = num_regs - 2
elif reg_size > 8: # 16-bit regs
reg_index = num_regs - 3
elif reg_size > 0: # 8-bit regs
reg_index = 0
else:
return None
return _registerClasses[reg_class][reg_index]
def get_size_by_reg(reg):
"""
@brief Determines the size of the given register
@param reg Register
@return Size of register
"""
reg_class = get_reg_class(reg)
num_regs = len(_registerClasses[reg_class])
for index, test_reg in enumerate(_registerClasses[reg_class]):
if test_reg == reg:
break
else: # no break
return None
if index == (num_regs-1):
return 64
elif index == (num_regs-2):
return 32
elif index == (num_regs-3):
return 16
else:
return 8
def get_reg_class_lst(reg_class):
"""
@return Returns the whole list of a given register class
"""<|fim▁hole|><|fim▁end|> | return _registerClasses[reg_class] |
<|file_name|>elaborate-trait-pred.rs<|end_file_name|><|fim▁begin|>// run-pass
// Test that we use the elaborated predicates from traits
// to satisfy const evaluatable predicates.
#![feature(generic_const_exprs)]
#![allow(incomplete_features)]
use std::mem::size_of;
trait Foo: Sized
where<|fim▁hole|>impl Foo for u64 {}
impl Foo for u32 {}
fn foo<T: Foo>() -> [u8; size_of::<T>()] {
[0; size_of::<T>()]
}
fn main() {
assert_eq!(foo::<u32>(), [0; 4]);
assert_eq!(foo::<u64>(), [0; 8]);
}<|fim▁end|> | [(); size_of::<Self>()]: Sized,
{
}
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># encoding: utf8
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = []
<|fim▁hole|> options = {},
name = 'Contact',
),
migrations.CreateModel(
fields = [(u'id', models.AutoField(verbose_name=u'ID', serialize=False, auto_created=True, primary_key=True),), ('date', models.DateTimeField(),), ('title', models.CharField(max_length=255),), ('code', models.CharField(max_length=255),), ('summary', models.TextField(),)],
bases = (models.Model,),
options = {},
name = 'Commits',
),
]<|fim▁end|> | operations = [
migrations.CreateModel(
fields = [(u'id', models.AutoField(verbose_name=u'ID', serialize=False, auto_created=True, primary_key=True),), ('name', models.CharField(max_length=255),), ('email', models.EmailField(max_length=75),), ('message', models.TextField(),), ('date', models.DateField(auto_now=True),)],
bases = (models.Model,), |
<|file_name|>0146_add_service_callback_api.py<|end_file_name|><|fim▁begin|>"""
Revision ID: 0146_add_service_callback_api
Revises: 0145_add_notification_reply_to<|fim▁hole|>"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0146_add_service_callback_api'
down_revision = '0145_add_notification_reply_to'
def upgrade():
op.create_table('service_callback_api_history',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('url', sa.String(), nullable=False),
sa.Column('bearer_token', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('updated_by_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('version', sa.Integer(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', 'version')
)
op.create_index(op.f('ix_service_callback_api_history_service_id'), 'service_callback_api_history',
['service_id'], unique=False)
op.create_index(op.f('ix_service_callback_api_history_updated_by_id'), 'service_callback_api_history',
['updated_by_id'], unique=False)
op.create_table('service_callback_api',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('url', sa.String(), nullable=False),
sa.Column('bearer_token', sa.String(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('updated_by_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('version', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.ForeignKeyConstraint(['updated_by_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_service_callback_api_service_id'), 'service_callback_api', ['service_id'], unique=True)
op.create_index(op.f('ix_service_callback_api_updated_by_id'), 'service_callback_api', ['updated_by_id'], unique=False)
def downgrade():
op.drop_index(op.f('ix_service_callback_api_updated_by_id'), table_name='service_callback_api')
op.drop_index(op.f('ix_service_callback_api_service_id'), table_name='service_callback_api')
op.drop_table('service_callback_api')
op.drop_index(op.f('ix_service_callback_api_history_updated_by_id'), table_name='service_callback_api_history')
op.drop_index(op.f('ix_service_callback_api_history_service_id'), table_name='service_callback_api_history')
op.drop_table('service_callback_api_history')<|fim▁end|> | Create Date: 2017-11-28 15:13:48.730554
|
<|file_name|>topicview.py<|end_file_name|><|fim▁begin|># coding: utf8
# topicview.py
# 1/2/2015 jichi
if __name__ == '__main__':
import sys
sys.path.append('..')
import debug
debug.initenv()
import json
from functools import partial
from PySide.QtCore import Qt, QObject
from Qt5 import QtWidgets
from sakurakit import skevents, skqss
from sakurakit.skclass import Q_Q, memoized, memoizedproperty
from sakurakit.skdebug import dprint, dwarn
from sakurakit.sktr import tr_
from sakurakit.skwebkit import SkWebView #, SkWebViewBean
from sakurakit.skwidgets import SkTitlelessDockWidget, SkStyleView
#from sakurakit.skqml import QmlObject
from mytr import mytr_
import comets, config, dataman, netman, osutil, rc
@Q_Q
class _TopicView(object):
def __init__(self, q):
self.topicId = 0 # long
self.topicComet = None
self._createUi(q)
#shortcut('ctrl+n', self._new, parent=q)
def _createUi(self, q):
q.setCentralWidget(self.webView)
dock = SkTitlelessDockWidget(self.inspector)
dock.setFeatures(QtWidgets.QDockWidget.NoDockWidgetFeatures)
#dock.setAllowedAreas(Qt.BottomDockWidgetArea)
q.addDockWidget(Qt.BottomDockWidgetArea, dock)
def clear(self):
self.setTopicId(0)
def setTopicId(self, topicId): # long ->
if self.topicId != topicId:
self.topicId = topicId
if not topicId:
if self.topicComet:
self.topicComet.setActive(False)
else:
if not self.topicComet:
self.topicComet = comets.createPostComet()
qml = self.topicComet.q
#qml.topicDataReceived.connect(self._onTopicReceived)
qml.topicDataUpdated.connect(self._onTopicUpdated)
qml.postDataUpdated.connect(self._onPostUpdated)
qml.postDataReceived.connect(self._onPostReceived)
path = 'topic/%s' % topicId
self.topicComet.setPath(path)
if netman.manager().isOnline():
self.topicComet.setActive(True)
def _injectBeans(self):
h = self.webView.page().mainFrame()
#h.addToJavaScriptWindowObject('bean', self._webBean)
for name,obj in self._beans:
h.addToJavaScriptWindowObject(name, obj)
@memoizedproperty
def _beans(self):
"""
return [(unicode name, QObject bean)]
"""
import coffeebean
m = coffeebean.manager()
return (
('cacheBean', m.cacheBean),
('i18nBean', m.i18nBean),
('mainBean', m.mainBean),
('topicEditBean', self.topicEditBean),
#('topicInputBean', self.topicInputBean),
('postEditBean', self.postEditBean),
('postInputBean', self.postInputBean),
)
@memoizedproperty
def postEditBean(self):
import postedit
return postedit.PostEditorManagerBean(parent=self.q, manager=self.postEditorManager)
@memoizedproperty
def postInputBean(self):
import postinput
return postinput.PostInputManagerBean(parent=self.q, manager=self.postInputManager)
@memoizedproperty
def topicEditBean(self):
import topicedit
return topicedit.TopicEditorManagerBean(parent=self.q, manager=self.topicEditorManager)
#@memoizedproperty
#def topicInputBean(self):
# import topicinput
# return topicinput.TopicInputManagerBean(parent=self.q, manager=self.topicInputManager)
@memoizedproperty
def postEditorManager(self):
import postedit
ret = postedit.PostEditorManager(self.q)
ret.postChanged.connect(self._updatePost)
return ret
@memoizedproperty
def postInputManager(self):
import postinput
ret = postinput.PostInputManager(self.q)
ret.postReceived.connect(self._submitPost)
return ret
@memoizedproperty
def topicEditorManager(self):
import topicedit
ret = topicedit.TopicEditorManager(self.q)
ret.topicChanged.connect(self._updateTopic)
return ret
#@memoizedproperty
#def topicInputManager(self):
# import topicinput
# ret = topicinput.TopicInputManager(self.q)
# ret.topicReceived.connect(self._submitTopic)
# return ret
def _submitPost(self, postData, imageData):
if self.topicId and netman.manager().isOnline():
import forumapi
skevents.runlater(partial(forumapi.manager().submitPost,
postData, imageData,
topicId=self.topicId))
def _updatePost(self, postData, imageData):
if self.topicId and netman.manager().isOnline():
import forumapi
skevents.runlater(partial(forumapi.manager().updatePost,
postData, imageData))
#def _submitTopic(self, topicData, imageData, ticketData):
# subjectId = self.subjectId
# if subjectId:
# subjectType = 'game'
# else:
# subjectId = config.GLOBAL_SUBJECT_ID
# subjectType = 'subject'
# if netman.manager().isOnline():
# import forumapi
# skevents.runlater(partial(forumapi.manager().submitTopic,
# topicData, imageData, ticketData,
# subjectId=subjectId, subjectType=subjectType))
def _updateTopic(self, topicData, imageData, ticketData):
if netman.manager().isOnline():
import forumapi
skevents.runlater(partial(forumapi.manager().updateTopic,
topicData, imageData, ticketData))
def _onPostReceived(self, data): # str ->
try:
obj = json.loads(data)
topicId = obj['topicId']
if topicId == self.topicId and self.q.isVisible():
self.addPost(data)
dprint("pass")
except Exception, e:
dwarn(e)
def _onPostUpdated(self, data): # str ->
try:
obj = json.loads(data)
topicId = obj['topicId']
if topicId == self.topicId and self.q.isVisible():
self.updatePost(data)
dprint("pass")
except Exception, e:
dwarn(e)
#def _onTopicReceived(self, data): # str ->
# try:
# obj = json.loads(data)
# subjectId = obj['subjectId']
# if subjectId == self.subjectId and self.q.isVisible():
# self.addTopic(data)
# dprint("pass")
# except Exception, e:
# dwarn(e)
def _onTopicUpdated(self, data): # str ->
try:
obj = json.loads(data)
topicId = obj['id']
if topicId == self.topicId and self.q.isVisible():
self.updateTopic(data)
dprint("pass")
except Exception, e:
dwarn(e)
@memoizedproperty
def webView(self):
from PySide.QtWebKit import QWebPage
ret = SkWebView()
ret.titleChanged.connect(self.q.setWindowTitle)
ret.enableHighlight() # highlight selected text
ret.ignoreSslErrors() # needed to access Twitter
ret.pageAction(QWebPage.Reload).triggered.connect(
self.refresh, Qt.QueuedConnection)
ret.page().setLinkDelegationPolicy(QWebPage.DelegateAllLinks) # Since there are local images
ret.page().mainFrame().setScrollBarPolicy(Qt.Horizontal, Qt.ScrollBarAlwaysOff) # disable horizontal scroll
#ret.page().setLinkDelegationPolicy(QWebPage.DelegateExternalLinks)
ret.linkClicked.connect(osutil.open_url)
return ret
def refresh(self):
"""@reimp"""
self.newPostButton.setVisible(bool(self.topicId))
#self.gameButton.setVisible(bool(self.subjectId))
host = config.API_HOST # must be the same as rest.coffee for the same origin policy
user = dataman.manager().user()
w = self.webView
w.setHtml(rc.haml_template('haml/reader/topicview').render({
'host': host,
'locale': config.language2htmllocale(user.language),
'title': tr_("Topic"),
'topicId': self.topicId,
'userName': user.name if not user.isGuest() else '',
'userPassword': user.password,
'rc': rc,
'tr': tr_,
}), host)
self._injectBeans()
@memoizedproperty
def inspector(self):
ret = SkStyleView()
skqss.class_(ret, 'texture')
layout = QtWidgets.QHBoxLayout()
layout.addWidget(self.newPostButton)
#layout.addWidget(self.newTopicButton)
layout.addStretch()
layout.addWidget(self.browseButton)
#layout.addWidget(self.gameButton)
layout.addWidget(self.refreshButton)
ret.setLayout(layout)
layout.setContentsMargins(4, 4, 4, 4)
return ret
@memoizedproperty
def refreshButton(self):
ret = QtWidgets.QPushButton(tr_("Refresh"))
skqss.class_(ret, 'btn btn-primary')<|fim▁hole|> ret.clicked.connect(self.refresh)
#nm = netman.manager()
#ret.setEnabled(nm.isOnline())
#nm.onlineChanged.connect(ret.setEnabled)
return ret
#@memoizedproperty
#def gameButton(self):
# ret = QtWidgets.QPushButton(tr_("Game"))
# skqss.class_(ret, 'btn btn-info')
# ret.setToolTip(tr_("Game"))
# #ret.setStatusTip(ret.toolTip())
# ret.clicked.connect(self._showGame)
# return ret
#def _showGame(self):
# import main
# main.manager().showGameView(itemId=self.subjectId)
@memoizedproperty
def browseButton(self):
ret = QtWidgets.QPushButton(tr_("Browse"))
skqss.class_(ret, 'btn btn-default')
ret.setToolTip(tr_("Browse"))
#ret.setStatusTip(ret.toolTip())
ret.clicked.connect(lambda:
osutil.open_url("http://sakuradite.com/topic/%s" % self.topicId))
return ret
@memoizedproperty
def newPostButton(self):
ret = QtWidgets.QPushButton("+ " + tr_("Reply"))
skqss.class_(ret, 'btn btn-primary')
ret.setToolTip(tr_("New"))
#ret.setStatusTip(ret.toolTip())
ret.clicked.connect(self._newPost)
return ret
def _newPost(self):
self.postInputManager.newPost(self.topicId)
# append ;null for better performance
def addPost(self, data): # unicode json ->
js = 'if (window.READY) addPost(%s); null' % data
self.webView.evaljs(js)
# append ;null for better performance
def updatePost(self, data): # unicode json ->
js = 'if (window.READY) updatePost(%s); null' % data
self.webView.evaljs(js)
# append ;null for better performance
#def addTopic(self, data): # unicode json ->
# js = 'if (window.READY) addTopic(%s); null' % data
# self.webView.evaljs(js)
# append ;null for better performance
def updateTopic(self, data): # unicode json ->
js = 'if (window.READY) updateTopic(%s); null' % data
self.webView.evaljs(js)
class TopicView(QtWidgets.QMainWindow):
def __init__(self, parent=None):
WINDOW_FLAGS = Qt.Dialog|Qt.WindowMinMaxButtonsHint
super(TopicView, self).__init__(parent, WINDOW_FLAGS)
self.setWindowIcon(rc.icon('window-forum'))
self.setWindowTitle(tr_("Topic"))
self.__d = _TopicView(self)
def refresh(self): self.__d.refresh()
def clear(self): self.__d.clear()
def subjectId(self): return self.__d.subjectId
def setSubjectId(self, subjectId): self.__d.setSubjectId(subjectId)
def topicId(self): return self.__d.topicId
def setTopicId(self, topicId): self.__d.setTopicId(topicId)
def setVisible(self, value):
"""@reimp @public"""
if value and not self.isVisible():
self.__d.refresh()
super(TopicView, self).setVisible(value)
if not value:
self.__d.webView.clear()
self.__d.clear()
class _TopicViewManager:
def __init__(self):
self.dialogs = []
def _createDialog(self):
import windows
parent = windows.normal()
ret = TopicView(parent=parent)
ret.resize(550, 580)
return ret
def getDialog(self, topicId=0):
"""
@param* subjectId long
@param* topicId long
@return TopicView or None
"""
for w in self.dialogs:
if w.isVisible() and topicId == w.topicId():
return w
def createDialog(self, topicId=0):
"""
@param* subjectId long
@param* topicId long
@return TopicView
"""
w = self.getDialog(topicId)
if w:
w.refresh()
return w
for w in self.dialogs:
if not w.isVisible():
w.clear()
w.setTopicId(topicId)
return w
w = self._createDialog()
w.setTopicId(topicId)
self.dialogs.append(w)
return w
class TopicViewManager:
def __init__(self):
self.__d = _TopicViewManager()
#def clear(self): self.hide()
def isViewVisible(self, *args, **kwargs):
return bool(self.__d.getDialog(*args, **kwargs))
def isVisible(self):
if self.__d.dialogs:
for w in self.__d.dialogs:
if w.isVisible():
return True
return False
def hide(self):
if self.__d.dialogs:
for w in self.__d.dialogs:
if w.isVisible():
w.hide()
def show(self, topicId):
"""
@param* subjectId long subject ID
@param* topicId long chatroom topicId
"""
w = self.__d.createDialog(topicId)
w.show()
w.raise_()
@memoized
def manager():
import webrc
webrc.init()
return TopicViewManager()
#@QmlObject
#class TopicViewManagerProxy(QObject):
# def __init__(self, parent=None):
# super(TopicViewManagerProxy, self).__init__(parent)
#
# @Slot(int)
# def showTopic(self, id):
# manager().showTopic(id)
if __name__ == '__main__':
a = debug.app()
#manager().showTopic('global')
manager().showTopic(config.GLOBAL_TOPIC_ID)
a.exec_()
# EOF<|fim▁end|> | ret.setToolTip(tr_("Refresh") + " (Ctrl+R)")
#ret.setStatusTip(ret.toolTip()) |
<|file_name|>service_instance_test.go<|end_file_name|><|fim▁begin|>// Copyright 2012 tsuru authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package service
import (
"bytes"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
"runtime"
"sort"
"strconv"
"sync"
"sync/atomic"
"github.com/globalsign/mgo/bson"
"github.com/tsuru/config"
"github.com/tsuru/tsuru/action"
"github.com/tsuru/tsuru/app/bind"
"github.com/tsuru/tsuru/auth"
"github.com/tsuru/tsuru/db"
"github.com/tsuru/tsuru/db/dbtest"
"github.com/tsuru/tsuru/provision/provisiontest"
"github.com/tsuru/tsuru/router/routertest"
"github.com/tsuru/tsuru/servicemanager"
_ "github.com/tsuru/tsuru/storage/mongodb"
authTypes "github.com/tsuru/tsuru/types/auth"
"gopkg.in/check.v1"
)
type InstanceSuite struct {
conn *db.Storage
team *authTypes.Team
user *auth.User
mockTeamService *authTypes.MockTeamService
}
var _ = check.Suite(&InstanceSuite{})
func (s *InstanceSuite) SetUpSuite(c *check.C) {
var err error
config.Set("log:disable-syslog", true)
config.Set("database:url", "127.0.0.1:27017?maxPoolSize=100")
config.Set("database:name", "tsuru_service_instance_test")
s.conn, err = db.Conn()
c.Assert(err, check.IsNil)
}
func (s *InstanceSuite) SetUpTest(c *check.C) {
routertest.FakeRouter.Reset()
dbtest.ClearAllCollections(s.conn.Apps().Database)
s.user = &auth.User{Email: "[email protected]", Password: "123"}
s.team = &authTypes.Team{Name: "raul"}
err := s.conn.Users().Insert(s.user)
c.Assert(err, check.IsNil)
s.mockTeamService = &authTypes.MockTeamService{
OnFindByName: func(name string) (*authTypes.Team, error) {
if name == s.team.Name {
return s.team, nil
}
return nil, authTypes.ErrTeamNotFound
},
OnFindByNames: func(names []string) ([]authTypes.Team, error) {
return []authTypes.Team{*s.team}, nil
},
}
servicemanager.Team = s.mockTeamService
}
func (s *InstanceSuite) TearDownSuite(c *check.C) {
s.conn.ServiceInstances().Database.DropDatabase()
s.conn.Close()
}
func (s *InstanceSuite) TestDeleteServiceInstance(c *check.C) {
si := &ServiceInstance{Name: "MySQL"}
s.conn.ServiceInstances().Insert(&si)
evt := createEvt(c)
DeleteInstance(si, evt, "")
query := bson.M{"name": si.Name}
qtd, err := s.conn.ServiceInstances().Find(query).Count()
c.Assert(err, check.IsNil)
c.Assert(qtd, check.Equals, 0)
}
func (s *InstanceSuite) TestRetrieveAssociatedService(c *check.C) {
service := Service{Name: "my_service"}
s.conn.Services().Insert(&service)
serviceInstance := &ServiceInstance{
Name: service.Name,
ServiceName: service.Name,
}
rService := serviceInstance.Service()
c.Assert(service.Name, check.Equals, rService.Name)
}
func (s *InstanceSuite) TestFindApp(c *check.C) {
instance := ServiceInstance{
Name: "myinstance",
Apps: []string{"app1", "app2"},
}
c.Assert(instance.FindApp("app1"), check.Equals, 0)
c.Assert(instance.FindApp("app2"), check.Equals, 1)
c.Assert(instance.FindApp("what"), check.Equals, -1)
}
func (s *InstanceSuite) TestBindApp(c *check.C) {
oldBindAppDBAction := bindAppDBAction
oldBindAppEndpointAction := bindAppEndpointAction
oldSetBoundEnvsAction := setBoundEnvsAction
oldBindUnitsAction := bindUnitsAction
defer func() {
bindAppDBAction = oldBindAppDBAction
bindAppEndpointAction = oldBindAppEndpointAction
setBoundEnvsAction = oldSetBoundEnvsAction
bindUnitsAction = oldBindUnitsAction
}()
var calls []string
var params []interface{}
bindAppDBAction = &action.Action{
Forward: func(ctx action.FWContext) (action.Result, error) {
calls = append(calls, "bindAppDBAction")
params = ctx.Params
return nil, nil
},
}
bindAppEndpointAction = &action.Action{
Forward: func(ctx action.FWContext) (action.Result, error) {
calls = append(calls, "bindAppEndpointAction")
return nil, nil
},
}
setBoundEnvsAction = &action.Action{
Forward: func(ctx action.FWContext) (action.Result, error) {
calls = append(calls, "setBoundEnvsAction")
return nil, nil
},
}
bindUnitsAction = &action.Action{
Forward: func(ctx action.FWContext) (action.Result, error) {
calls = append(calls, "bindUnitsAction")
return nil, nil
},
}
var si ServiceInstance
a := provisiontest.NewFakeApp("myapp", "python", 1)
var buf bytes.Buffer
evt := createEvt(c)
err := si.BindApp(a, true, &buf, evt, "")
c.Assert(err, check.IsNil)
expectedCalls := []string{
"bindAppDBAction", "bindAppEndpointAction",
"setBoundEnvsAction", "bindUnitsAction",
}
expectedParams := []interface{}{&bindPipelineArgs{app: a, serviceInstance: &si, writer: &buf, shouldRestart: true, event: evt, requestID: ""}}
c.Assert(calls, check.DeepEquals, expectedCalls)
c.Assert(params, check.DeepEquals, expectedParams)
c.Assert(buf.String(), check.Equals, "")
}
func (s *InstanceSuite) TestGetServiceInstancesBoundToApp(c *check.C) {
srvc := Service{Name: "mysql"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{
Name: "t3sql",
ServiceName: "mysql",
Tags: []string{},<|fim▁hole|> }
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
sInstance2 := ServiceInstance{
Name: "s9sql",
ServiceName: "mysql",
Tags: []string{},
Apps: []string{"app1"},
BoundUnits: []Unit{},
Teams: []string{},
}
err = s.conn.ServiceInstances().Insert(&sInstance2)
c.Assert(err, check.IsNil)
sInstances, err := GetServiceInstancesBoundToApp("app2")
c.Assert(err, check.IsNil)
expected := []ServiceInstance{sInstance}
c.Assert(sInstances, check.DeepEquals, expected)
sInstances, err = GetServiceInstancesBoundToApp("app1")
c.Assert(err, check.IsNil)
expected = []ServiceInstance{sInstance, sInstance2}
c.Assert(sInstances, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestGetServiceInstancesByServices(c *check.C) {
srvc := Service{Name: "mysql"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{Name: "t3sql", ServiceName: "mysql", Teams: []string{s.team.Name}}
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
sInstance2 := ServiceInstance{Name: "s9sql", ServiceName: "mysql", Tags: []string{}}
err = s.conn.ServiceInstances().Insert(&sInstance2)
c.Assert(err, check.IsNil)
sInstances, err := GetServiceInstancesByServices([]Service{srvc})
c.Assert(err, check.IsNil)
expected := []ServiceInstance{{Name: "t3sql", ServiceName: "mysql", Tags: []string{}}, sInstance2}
c.Assert(sInstances, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestGetServiceInstancesByServicesWithoutAnyExistingServiceInstances(c *check.C) {
srvc := Service{Name: "mysql"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
sInstances, err := GetServiceInstancesByServices([]Service{srvc})
c.Assert(err, check.IsNil)
c.Assert(sInstances, check.DeepEquals, []ServiceInstance(nil))
}
func (s *InstanceSuite) TestGetServiceInstancesByServicesWithTwoServices(c *check.C) {
srvc := Service{Name: "mysql"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
srvc2 := Service{Name: "mongodb"}
err = s.conn.Services().Insert(&srvc2)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{Name: "t3sql", ServiceName: "mysql", Teams: []string{s.team.Name}}
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
sInstance2 := ServiceInstance{Name: "s9nosql", ServiceName: "mongodb", Tags: []string{"tag 1", "tag 2"}}
err = s.conn.ServiceInstances().Insert(&sInstance2)
c.Assert(err, check.IsNil)
sInstances, err := GetServiceInstancesByServices([]Service{srvc, srvc2})
c.Assert(err, check.IsNil)
expected := []ServiceInstance{{Name: "t3sql", ServiceName: "mysql", Tags: []string{}}, sInstance2}
c.Assert(sInstances, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestGenericServiceInstancesFilter(c *check.C) {
srvc := Service{Name: "mysql"}
teams := []string{s.team.Name}
query := genericServiceInstancesFilter(srvc, teams)
c.Assert(query, check.DeepEquals, bson.M{"service_name": srvc.Name, "teams": bson.M{"$in": teams}})
}
func (s *InstanceSuite) TestGenericServiceInstancesFilterWithServiceSlice(c *check.C) {
services := []Service{
{Name: "mysql"},
{Name: "mongodb"},
}
names := []string{"mysql", "mongodb"}
teams := []string{s.team.Name}
query := genericServiceInstancesFilter(services, teams)
c.Assert(query, check.DeepEquals, bson.M{"service_name": bson.M{"$in": names}, "teams": bson.M{"$in": teams}})
}
func (s *InstanceSuite) TestGenericServiceInstancesFilterWithoutSpecifingTeams(c *check.C) {
services := []Service{
{Name: "mysql"},
{Name: "mongodb"},
}
names := []string{"mysql", "mongodb"}
teams := []string{}
query := genericServiceInstancesFilter(services, teams)
c.Assert(query, check.DeepEquals, bson.M{"service_name": bson.M{"$in": names}})
}
func (s *InstanceSuite) TestAdditionalInfo(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`[{"label": "key", "value": "value"}, {"label": "key2", "value": "value2"}]`))
}))
defer ts.Close()
srvc := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "ql", ServiceName: srvc.Name}
info, err := si.Info("")
c.Assert(err, check.IsNil)
expected := map[string]string{
"key": "value",
"key2": "value2",
}
c.Assert(info, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestMarshalJSON(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Write([]byte(`[{"label": "key", "value": "value"}]`))
}))
defer ts.Close()
srvc := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "ql", ServiceName: srvc.Name}
data, err := json.Marshal(&si)
c.Assert(err, check.IsNil)
var result map[string]interface{}
err = json.Unmarshal(data, &result)
c.Assert(err, check.IsNil)
expected := map[string]interface{}{
"Id": float64(0),
"Name": "ql",
"PlanName": "",
"Teams": nil,
"Apps": nil,
"ServiceName": "mysql",
"Info": map[string]interface{}{"key": "value"},
"TeamOwner": "",
}
c.Assert(result, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestMarshalJSONWithoutInfo(c *check.C) {
srvc := Service{Name: "mysql", Endpoint: map[string]string{"production": ""}}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "ql", ServiceName: srvc.Name}
data, err := json.Marshal(&si)
c.Assert(err, check.IsNil)
var result map[string]interface{}
err = json.Unmarshal(data, &result)
c.Assert(err, check.IsNil)
expected := map[string]interface{}{
"Id": float64(0),
"Name": "ql",
"PlanName": "",
"Teams": nil,
"Apps": nil,
"ServiceName": "mysql",
"Info": nil,
"TeamOwner": "",
}
c.Assert(result, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestMarshalJSONWithoutEndpoint(c *check.C) {
srvc := Service{Name: "mysql"}
err := s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "ql", ServiceName: srvc.Name}
data, err := json.Marshal(&si)
c.Assert(err, check.IsNil)
var result map[string]interface{}
err = json.Unmarshal(data, &result)
c.Assert(err, check.IsNil)
expected := map[string]interface{}{
"Id": float64(0),
"Name": "ql",
"PlanName": "",
"Teams": nil,
"Apps": nil,
"ServiceName": "mysql",
"Info": nil,
"TeamOwner": "",
}
c.Assert(result, check.DeepEquals, expected)
}
func (s *InstanceSuite) TestDeleteInstance(c *check.C) {
h := TestHandler{}
ts := httptest.NewServer(&h)
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "instance", ServiceName: srv.Name}
err = s.conn.ServiceInstances().Insert(&si)
c.Assert(err, check.IsNil)
evt := createEvt(c)
err = DeleteInstance(&si, evt, "")
h.Lock()
defer h.Unlock()
c.Assert(err, check.IsNil)
l, err := s.conn.ServiceInstances().Find(bson.M{"name": si.Name}).Count()
c.Assert(err, check.IsNil)
c.Assert(l, check.Equals, 0)
c.Assert(h.url, check.Equals, "/resources/"+si.Name)
c.Assert(h.method, check.Equals, "DELETE")
}
func (s *InstanceSuite) TestDeleteInstanceWithApps(c *check.C) {
si := ServiceInstance{Name: "instance", Apps: []string{"foo"}}
err := s.conn.ServiceInstances().Insert(&si)
c.Assert(err, check.IsNil)
s.conn.ServiceInstances().Remove(bson.M{"name": si.Name})
evt := createEvt(c)
err = DeleteInstance(&si, evt, "")
c.Assert(err, check.ErrorMatches, "^This service instance is bound to at least one app. Unbind them before removing it$")
}
func (s *InstanceSuite) TestCreateServiceInstance(c *check.C) {
var requests int32
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
atomic.AddInt32(&requests, 1)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", PlanName: "small", TeamOwner: s.team.Name, Tags: []string{"tag1", "tag2"}}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.IsNil)
si, err := GetServiceInstance("mongodb", "instance")
c.Assert(err, check.IsNil)
c.Assert(atomic.LoadInt32(&requests), check.Equals, int32(1))
c.Assert(si.PlanName, check.Equals, "small")
c.Assert(si.TeamOwner, check.Equals, s.team.Name)
c.Assert(si.Teams, check.DeepEquals, []string{s.team.Name})
c.Assert(si.Tags, check.DeepEquals, []string{"tag1", "tag2"})
}
func (s *InstanceSuite) TestCreateServiceInstanceValidatesTeamOwner(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", PlanName: "small", TeamOwner: "unknown", Tags: []string{"tag1", "tag2"}}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.ErrorMatches, "Team owner doesn't exist")
}
func (s *InstanceSuite) TestCreateServiceInstanceWithSameInstanceName(c *check.C) {
var requests int32
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
atomic.AddInt32(&requests, 1)
}))
defer ts.Close()
srv := []Service{
{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"},
{Name: "mongodb2", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"},
{Name: "mongodb3", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"},
}
instance := ServiceInstance{Name: "instance", PlanName: "small", TeamOwner: s.team.Name}
evt := createEvt(c)
for _, service := range srv {
err := s.conn.Services().Insert(&service)
c.Assert(err, check.IsNil)
err = CreateServiceInstance(instance, &service, evt, "")
c.Assert(err, check.IsNil)
}
si, err := GetServiceInstance("mongodb3", "instance")
c.Assert(err, check.IsNil)
c.Assert(atomic.LoadInt32(&requests), check.Equals, int32(3))
c.Assert(si.PlanName, check.Equals, "small")
c.Assert(si.TeamOwner, check.Equals, s.team.Name)
c.Assert(si.Teams, check.DeepEquals, []string{s.team.Name})
c.Assert(si.Name, check.Equals, "instance")
c.Assert(si.ServiceName, check.Equals, "mongodb3")
err = CreateServiceInstance(instance, &srv[0], evt, "")
c.Assert(err, check.Equals, ErrInstanceNameAlreadyExists)
}
func (s *InstanceSuite) TestCreateSpecifyOwner(c *check.C) {
var requests int32
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
atomic.AddInt32(&requests, 1)
}))
defer ts.Close()
team := authTypes.Team{Name: "owner"}
s.mockTeamService.OnFindByName = func(name string) (*authTypes.Team, error) {
c.Assert(name, check.Equals, team.Name)
return &team, nil
}
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", PlanName: "small", TeamOwner: team.Name}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.IsNil)
si, err := GetServiceInstance("mongodb", "instance")
c.Assert(err, check.IsNil)
c.Assert(atomic.LoadInt32(&requests), check.Equals, int32(1))
c.Assert(si.TeamOwner, check.Equals, team.Name)
}
func (s *InstanceSuite) TestCreateServiceInstanceNoTeamOwner(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", PlanName: "small"}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.Equals, ErrTeamMandatory)
}
func (s *InstanceSuite) TestCreateServiceInstanceNameShouldBeUnique(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", TeamOwner: s.team.Name}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.IsNil)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.Equals, ErrInstanceNameAlreadyExists)
}
func (s *InstanceSuite) TestCreateServiceInstanceEndpointFailure(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusInternalServerError)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance"}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.NotNil)
count, err := s.conn.ServiceInstances().Find(bson.M{"name": "instance"}).Count()
c.Assert(err, check.IsNil)
c.Assert(count, check.Equals, 0)
}
func (s *InstanceSuite) TestCreateServiceInstanceValidatesTheName(c *check.C) {
var tests = []struct {
input string
err error
}{
{"my-service", nil},
{"my_service", nil},
{"MyService", nil},
{"a1", nil},
{"--app", ErrInvalidInstanceName},
{"123servico", ErrInvalidInstanceName},
{"a", ErrInvalidInstanceName},
{"a@123", ErrInvalidInstanceName},
}
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
evt := createEvt(c)
for _, t := range tests {
instance := ServiceInstance{Name: t.input, TeamOwner: s.team.Name}
err := CreateServiceInstance(instance, &srv, evt, "")
c.Check(err, check.Equals, t.err, check.Commentf(t.input))
}
}
func (s *InstanceSuite) TestCreateServiceInstanceRemovesDuplicatedAndEmptyTags(c *check.C) {
var requests int32
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
atomic.AddInt32(&requests, 1)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", PlanName: "small", TeamOwner: s.team.Name, Tags: []string{"", " tag1 ", "tag1", " "}}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.IsNil)
si, err := GetServiceInstance("mongodb", "instance")
c.Assert(err, check.IsNil)
c.Assert(atomic.LoadInt32(&requests), check.Equals, int32(1))
c.Assert(si.Tags, check.DeepEquals, []string{"tag1"})
}
func (s *InstanceSuite) TestUpdateServiceInstance(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", ServiceName: "mongodb", PlanName: "small", TeamOwner: s.team.Name, Tags: []string{"tag1"}, Teams: []string{s.team.Name}}
err = s.conn.ServiceInstances().Insert(instance)
c.Assert(err, check.IsNil)
var si ServiceInstance
err = s.conn.ServiceInstances().Find(bson.M{"name": "instance"}).One(&si)
c.Assert(err, check.IsNil)
newTeam := authTypes.Team{Name: "new-team-owner"}
s.mockTeamService.OnFindByName = func(name string) (*authTypes.Team, error) {
c.Assert(name, check.Equals, newTeam.Name)
return &newTeam, nil
}
si.Description = "desc"
si.Tags = []string{"tag2"}
si.TeamOwner = newTeam.Name
evt := createEvt(c)
err = instance.Update(srv, si, evt, "")
c.Assert(err, check.IsNil)
err = s.conn.ServiceInstances().Find(bson.M{"name": "instance"}).One(&si)
c.Assert(err, check.IsNil)
c.Assert(si.PlanName, check.Equals, "small")
c.Assert(si.Description, check.Equals, "desc")
c.Assert(si.Tags, check.DeepEquals, []string{"tag2"})
c.Assert(si.TeamOwner, check.Equals, newTeam.Name)
c.Assert(si.Teams, check.DeepEquals, []string{s.team.Name, newTeam.Name})
}
func (s *InstanceSuite) TestUpdateServiceInstanceValidatesTeamOwner(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", ServiceName: "mongodb", PlanName: "small", TeamOwner: s.team.Name, Tags: []string{"tag1"}}
evt := createEvt(c)
err = CreateServiceInstance(instance, &srv, evt, "")
c.Assert(err, check.IsNil)
var si ServiceInstance
err = s.conn.ServiceInstances().Find(bson.M{"name": "instance"}).One(&si)
c.Assert(err, check.IsNil)
si.TeamOwner = "unknown"
err = instance.Update(srv, si, evt, "")
c.Assert(err, check.ErrorMatches, "Team owner doesn't exist")
}
func (s *InstanceSuite) TestUpdateServiceInstanceRemovesDuplicatedAndEmptyTags(c *check.C) {
var requests int32
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
atomic.AddInt32(&requests, 1)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t"}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
instance := ServiceInstance{Name: "instance", ServiceName: "mongodb", PlanName: "small", TeamOwner: s.team.Name, Tags: []string{"tag1"}, Teams: []string{s.team.Name}}
err = s.conn.ServiceInstances().Insert(instance)
c.Assert(err, check.IsNil)
instance.Tags = []string{"tag2", " ", " tag2 "}
evt := createEvt(c)
err = instance.Update(srv, instance, evt, "")
c.Assert(err, check.IsNil)
c.Assert(atomic.LoadInt32(&requests), check.Equals, int32(1))
var si ServiceInstance
err = s.conn.ServiceInstances().Find(bson.M{"name": "instance"}).One(&si)
c.Assert(err, check.IsNil)
c.Assert(si.Tags, check.DeepEquals, []string{"tag2"})
}
func (s *InstanceSuite) TestStatus(c *check.C) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
srv := Service{Name: "mongodb", Endpoint: map[string]string{"production": ts.URL}}
err := s.conn.Services().Insert(&srv)
c.Assert(err, check.IsNil)
si := ServiceInstance{Name: "instance", ServiceName: srv.Name}
status, err := si.Status("")
c.Assert(err, check.IsNil)
c.Assert(status, check.Equals, "up")
}
func (s *InstanceSuite) TestGetServiceInstance(c *check.C) {
s.conn.ServiceInstances().Insert(
ServiceInstance{Name: "mongo-1", ServiceName: "mongodb", Teams: []string{s.team.Name}},
ServiceInstance{Name: "mongo-2", ServiceName: "mongodb", Teams: []string{s.team.Name}},
ServiceInstance{Name: "mongo-3", ServiceName: "mongodb", Teams: []string{s.team.Name}},
ServiceInstance{Name: "mongo-4", ServiceName: "mongodb", Teams: []string{s.team.Name}},
ServiceInstance{Name: "mongo-5", ServiceName: "mongodb"},
)
instance, err := GetServiceInstance("mongodb", "mongo-1")
c.Assert(err, check.IsNil)
c.Assert(instance.Name, check.Equals, "mongo-1")
c.Assert(instance.ServiceName, check.Equals, "mongodb")
c.Assert(instance.Teams, check.DeepEquals, []string{s.team.Name})
instance, err = GetServiceInstance("mongodb", "mongo-6")
c.Assert(instance, check.IsNil)
c.Assert(err, check.Equals, ErrServiceInstanceNotFound)
instance, err = GetServiceInstance("mongodb", "mongo-5")
c.Assert(err, check.IsNil)
c.Assert(instance.Name, check.Equals, "mongo-5")
}
func (s *InstanceSuite) TestGetIdentfier(c *check.C) {
srv := ServiceInstance{Name: "mongodb"}
identifier := srv.GetIdentifier()
c.Assert(identifier, check.Equals, srv.Name)
srv.Id = 10
identifier = srv.GetIdentifier()
c.Assert(identifier, check.Equals, strconv.Itoa(srv.Id))
}
func (s *InstanceSuite) TestGrantTeamToInstance(c *check.C) {
user := &auth.User{Email: "[email protected]", Password: "123"}
err := s.conn.Users().Insert(user)
c.Assert(err, check.IsNil)
team := authTypes.Team{Name: "test2"}
s.mockTeamService.OnFindByName = func(name string) (*authTypes.Team, error) {
c.Assert(name, check.Equals, team.Name)
return &team, nil
}
srvc := Service{Name: "mysql", Teams: []string{team.Name}, IsRestricted: false}
err = s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{
Name: "j4sql",
ServiceName: srvc.Name,
}
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
sInstance.Grant(team.Name)
si, err := GetServiceInstance("mysql", "j4sql")
c.Assert(err, check.IsNil)
c.Assert(si.Teams, check.DeepEquals, []string{"test2"})
}
func (s *InstanceSuite) TestRevokeTeamToInstance(c *check.C) {
user := &auth.User{Email: "[email protected]", Password: "123"}
err := s.conn.Users().Insert(user)
c.Assert(err, check.IsNil)
team := authTypes.Team{Name: "test2"}
s.mockTeamService.OnFindByName = func(name string) (*authTypes.Team, error) {
c.Assert(name, check.Equals, team.Name)
return &team, nil
}
srvc := Service{Name: "mysql", Teams: []string{team.Name}, IsRestricted: false}
err = s.conn.Services().Insert(&srvc)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{
Name: "j4sql",
ServiceName: srvc.Name,
Teams: []string{team.Name},
}
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
si, err := GetServiceInstance("mysql", "j4sql")
c.Assert(err, check.IsNil)
c.Assert(si.Teams, check.DeepEquals, []string{"test2"})
sInstance.Revoke(team.Name)
si, err = GetServiceInstance("mysql", "j4sql")
c.Assert(err, check.IsNil)
c.Assert(si.Teams, check.DeepEquals, []string{})
}
func (s *InstanceSuite) TestUnbindApp(c *check.C) {
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
w.WriteHeader(http.StatusOK)
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
a := provisiontest.NewFakeApp("myapp", "static", 2)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
Apps: []string{a.GetName()},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
err = a.AddInstance(bind.AddInstanceArgs{
Envs: []bind.ServiceEnvVar{
{EnvVar: bind.EnvVar{Name: "ENV1", Value: "VAL1"}, ServiceName: "mysql", InstanceName: "my-mysql"},
{EnvVar: bind.EnvVar{Name: "ENV2", Value: "VAL2"}, ServiceName: "mysql", InstanceName: "my-mysql"},
},
ShouldRestart: true,
})
c.Assert(err, check.IsNil)
units, err := a.Units()
c.Assert(err, check.IsNil)
for i := range units {
err = si.BindUnit(a, &units[i])
c.Assert(err, check.IsNil)
}
var buf bytes.Buffer
evt := createEvt(c)
err = si.UnbindApp(a, false, &buf, evt, "")
c.Assert(err, check.IsNil)
c.Assert(buf.String(), check.Matches, "remove instance")
c.Assert(reqs, check.HasLen, 5)
c.Assert(reqs[0].Method, check.Equals, "POST")
c.Assert(reqs[0].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[1].Method, check.Equals, "POST")
c.Assert(reqs[1].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[2].Method, check.Equals, "DELETE")
c.Assert(reqs[2].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[3].Method, check.Equals, "DELETE")
c.Assert(reqs[3].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[4].Method, check.Equals, "DELETE")
c.Assert(reqs[4].URL.Path, check.Equals, "/resources/my-mysql/bind-app")
siDB, err := GetServiceInstance("mysql", si.Name)
c.Assert(err, check.IsNil)
c.Assert(siDB.Apps, check.DeepEquals, []string{})
c.Assert(a.GetServiceEnvs(), check.DeepEquals, []bind.ServiceEnvVar{})
}
func (s *InstanceSuite) TestUnbindAppFailureInUnbindAppCall(c *check.C) {
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
if r.Method == "DELETE" && r.URL.Path == "/resources/my-mysql/bind-app" {
w.WriteHeader(http.StatusInternalServerError)
w.Write([]byte("my unbind app err"))
return
}
w.WriteHeader(http.StatusOK)
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
a := provisiontest.NewFakeApp("myapp", "static", 2)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
Apps: []string{a.GetName()},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
err = a.AddInstance(bind.AddInstanceArgs{
Envs: []bind.ServiceEnvVar{
{EnvVar: bind.EnvVar{Name: "ENV1", Value: "VAL1"}, ServiceName: "mysql", InstanceName: "my-mysql"},
{EnvVar: bind.EnvVar{Name: "ENV2", Value: "VAL2"}, ServiceName: "mysql", InstanceName: "my-mysql"},
},
ShouldRestart: true,
})
c.Assert(err, check.IsNil)
units, err := a.Units()
c.Assert(err, check.IsNil)
for i := range units {
err = si.BindUnit(a, &units[i])
c.Assert(err, check.IsNil)
}
var buf bytes.Buffer
evt := createEvt(c)
err = si.UnbindApp(a, true, &buf, evt, "")
c.Assert(err, check.ErrorMatches, `Failed to unbind \("/resources/my-mysql/bind-app"\): invalid response: my unbind app err \(code: 500\)`)
c.Assert(buf.String(), check.Matches, "")
c.Assert(si.Apps, check.DeepEquals, []string{"myapp"})
c.Assert(reqs, check.HasLen, 7)
c.Assert(reqs[0].Method, check.Equals, "POST")
c.Assert(reqs[0].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[1].Method, check.Equals, "POST")
c.Assert(reqs[1].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[2].Method, check.Equals, "DELETE")
c.Assert(reqs[2].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[3].Method, check.Equals, "DELETE")
c.Assert(reqs[3].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[4].Method, check.Equals, "DELETE")
c.Assert(reqs[4].URL.Path, check.Equals, "/resources/my-mysql/bind-app")
c.Assert(reqs[5].Method, check.Equals, "POST")
c.Assert(reqs[5].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[6].Method, check.Equals, "POST")
c.Assert(reqs[6].URL.Path, check.Equals, "/resources/my-mysql/bind")
siDB, err := GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
c.Assert(siDB.Apps, check.DeepEquals, []string{"myapp"})
c.Assert(a.GetServiceEnvs(), check.DeepEquals, []bind.ServiceEnvVar{
{EnvVar: bind.EnvVar{Name: "ENV1", Value: "VAL1"}, ServiceName: "mysql", InstanceName: "my-mysql"},
{EnvVar: bind.EnvVar{Name: "ENV2", Value: "VAL2"}, ServiceName: "mysql", InstanceName: "my-mysql"},
})
}
func (s *InstanceSuite) TestUnbindAppFailureInAppEnvSet(c *check.C) {
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
w.WriteHeader(http.StatusOK)
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
a := provisiontest.NewFakeApp("myapp", "static", 2)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
Apps: []string{a.GetName()},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
units, err := a.Units()
c.Assert(err, check.IsNil)
for i := range units {
err = si.BindUnit(a, &units[i])
c.Assert(err, check.IsNil)
}
var buf bytes.Buffer
evt := createEvt(c)
err = si.UnbindApp(a, true, &buf, evt, "")
c.Assert(err, check.ErrorMatches, `instance not found`)
c.Assert(buf.String(), check.Matches, "")
c.Assert(si.Apps, check.DeepEquals, []string{"myapp"})
c.Assert(reqs, check.HasLen, 8)
c.Assert(reqs[0].Method, check.Equals, "POST")
c.Assert(reqs[0].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[1].Method, check.Equals, "POST")
c.Assert(reqs[1].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[2].Method, check.Equals, "DELETE")
c.Assert(reqs[2].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[3].Method, check.Equals, "DELETE")
c.Assert(reqs[3].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[4].Method, check.Equals, "DELETE")
c.Assert(reqs[4].URL.Path, check.Equals, "/resources/my-mysql/bind-app")
c.Assert(reqs[5].Method, check.Equals, "POST")
c.Assert(reqs[5].URL.Path, check.Equals, "/resources/my-mysql/bind-app")
c.Assert(reqs[6].Method, check.Equals, "POST")
c.Assert(reqs[6].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[7].Method, check.Equals, "POST")
c.Assert(reqs[7].URL.Path, check.Equals, "/resources/my-mysql/bind")
siDB, err := GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
c.Assert(siDB.Apps, check.DeepEquals, []string{"myapp"})
}
func (s *InstanceSuite) TestBindAppFullPipeline(c *check.C) {
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
w.WriteHeader(http.StatusOK)
if r.URL.Path == "/resources/my-mysql/bind-app" && r.Method == "POST" {
w.Write([]byte(`{"ENV1": "VAL1", "ENV2": "VAL2"}`))
}
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
a := provisiontest.NewFakeApp("myapp", "static", 2)
var buf bytes.Buffer
evt := createEvt(c)
err = si.BindApp(a, true, &buf, evt, "")
c.Assert(err, check.IsNil)
c.Assert(buf.String(), check.Matches, "add instance")
c.Assert(reqs, check.HasLen, 3)
c.Assert(reqs[0].Method, check.Equals, "POST")
c.Assert(reqs[0].URL.Path, check.Equals, "/resources/my-mysql/bind-app")
c.Assert(reqs[1].Method, check.Equals, "POST")
c.Assert(reqs[1].URL.Path, check.Equals, "/resources/my-mysql/bind")
c.Assert(reqs[2].Method, check.Equals, "POST")
c.Assert(reqs[2].URL.Path, check.Equals, "/resources/my-mysql/bind")
siDB, err := GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
c.Assert(siDB.Apps, check.DeepEquals, []string{"myapp"})
c.Assert(a.GetServiceEnvs(), check.DeepEquals, []bind.ServiceEnvVar{
{EnvVar: bind.EnvVar{Name: "ENV1", Value: "VAL1"}, ServiceName: "mysql", InstanceName: "my-mysql"},
{EnvVar: bind.EnvVar{Name: "ENV2", Value: "VAL2"}, ServiceName: "mysql", InstanceName: "my-mysql"},
})
}
func (s *InstanceSuite) TestBindAppMultipleApps(c *check.C) {
goMaxProcs := runtime.GOMAXPROCS(4)
defer runtime.GOMAXPROCS(goMaxProcs)
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
w.WriteHeader(http.StatusOK)
if r.URL.Path == "/resources/my-mysql/bind-app" && r.Method == "POST" {
w.Write([]byte(`{"ENV1": "VAL1", "ENV2": "VAL2"}`))
}
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
var apps []bind.App
var expectedNames []string
for i := 0; i < 100; i++ {
name := fmt.Sprintf("myapp-%02d", i)
expectedNames = append(expectedNames, name)
apps = append(apps, provisiontest.NewFakeApp(name, "static", 2))
}
evt := createEvt(c)
wg := sync.WaitGroup{}
for _, app := range apps {
wg.Add(1)
go func(app bind.App) {
defer wg.Done()
var buf bytes.Buffer
bindErr := si.BindApp(app, true, &buf, evt, "")
c.Assert(bindErr, check.IsNil)
}(app)
}
wg.Wait()
c.Assert(reqs, check.HasLen, 300)
siDB, err := GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
sort.Strings(siDB.Apps)
c.Assert(siDB.Apps, check.DeepEquals, expectedNames)
}
func (s *InstanceSuite) TestUnbindAppMultipleApps(c *check.C) {
originalMaxProcs := runtime.GOMAXPROCS(4)
defer runtime.GOMAXPROCS(originalMaxProcs)
var reqs []*http.Request
var mut sync.Mutex
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
mut.Lock()
defer mut.Unlock()
reqs = append(reqs, r)
w.WriteHeader(http.StatusOK)
if r.URL.Path == "/resources/my-mysql/bind-app" && r.Method == "POST" {
w.Write([]byte(`{"ENV1": "VAL1", "ENV2": "VAL2"}`))
}
}))
defer ts.Close()
serv := Service{Name: "mysql", Endpoint: map[string]string{"production": ts.URL}, Password: "s3cr3t", OwnerTeams: []string{s.team.Name}}
err := serv.Create()
c.Assert(err, check.IsNil)
si := ServiceInstance{
Name: "my-mysql",
ServiceName: "mysql",
Teams: []string{s.team.Name},
}
err = s.conn.ServiceInstances().Insert(si)
c.Assert(err, check.IsNil)
var apps []bind.App
evt := createEvt(c)
for i := 0; i < 20; i++ {
name := fmt.Sprintf("myapp-%02d", i)
app := provisiontest.NewFakeApp(name, "static", 2)
apps = append(apps, app)
var buf bytes.Buffer
err = si.BindApp(app, true, &buf, evt, "")
c.Assert(err, check.IsNil)
}
siDB, err := GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
wg := sync.WaitGroup{}
for _, app := range apps {
wg.Add(1)
go func(app bind.App) {
defer wg.Done()
var buf bytes.Buffer
unbindErr := siDB.UnbindApp(app, false, &buf, evt, "")
c.Assert(unbindErr, check.IsNil)
}(app)
}
wg.Wait()
c.Assert(reqs, check.HasLen, 120)
siDB, err = GetServiceInstance(si.ServiceName, si.Name)
c.Assert(err, check.IsNil)
sort.Strings(siDB.Apps)
c.Assert(siDB.Apps, check.DeepEquals, []string{})
}
func (s *S) TestRenameServiceInstanceTeam(c *check.C) {
sInstances := []ServiceInstance{
{Name: "si1", ServiceName: "mysql", Teams: []string{"team1", "team2", "team3"}, TeamOwner: "team1"},
{Name: "si2", ServiceName: "mysql", Teams: []string{"team1", "team3"}, TeamOwner: "team2"},
{Name: "si3", ServiceName: "mysql", Teams: []string{"team2", "team3"}, TeamOwner: "team3"},
}
for _, si := range sInstances {
err := s.conn.ServiceInstances().Insert(&si)
c.Assert(err, check.IsNil)
}
err := RenameServiceInstanceTeam("team2", "team9000")
c.Assert(err, check.IsNil)
var dbInstances []ServiceInstance
err = s.conn.ServiceInstances().Find(nil).Sort("name").All(&dbInstances)
c.Assert(err, check.IsNil)
c.Assert(dbInstances, check.DeepEquals, []ServiceInstance{
{Name: "si1", ServiceName: "mysql", Teams: []string{"team1", "team3", "team9000"}, TeamOwner: "team1", Apps: []string{}, BoundUnits: []Unit{}, Tags: []string{}},
{Name: "si2", ServiceName: "mysql", Teams: []string{"team1", "team3"}, TeamOwner: "team9000", Apps: []string{}, BoundUnits: []Unit{}, Tags: []string{}},
{Name: "si3", ServiceName: "mysql", Teams: []string{"team3", "team9000"}, TeamOwner: "team3", Apps: []string{}, BoundUnits: []Unit{}, Tags: []string{}},
})
}
func (s *S) TestProxyInstance(c *check.C) {
var remoteReq *http.Request
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
remoteReq = r
w.WriteHeader(http.StatusNoContent)
}))
defer ts.Close()
service := Service{
Name: "tensorflow",
Endpoint: map[string]string{"production": ts.URL},
Password: "abcde",
}
err := s.conn.Services().Insert(service)
c.Assert(err, check.IsNil)
sInstance := ServiceInstance{Name: "noflow", ServiceName: "tensorflow", Teams: []string{s.team.Name}}
err = s.conn.ServiceInstances().Insert(&sInstance)
c.Assert(err, check.IsNil)
tests := []struct {
method string
path string
expectedPath string
err string
}{
{method: "GET", path: "", expectedPath: "/resources/noflow"},
{method: "GET", path: "/", expectedPath: "/resources/noflow"},
{method: "GET", path: "/resources/noflow", expectedPath: "/resources/noflow"},
{method: "GET", path: "/resources/noflow/", expectedPath: "/resources/noflow"},
{method: "GET", path: "/resources/noflowxxx", expectedPath: "/resources/noflow/resources/noflowxxx"},
{method: "POST", path: "", err: "proxy request POST \"\" is forbidden"},
{method: "POST", path: "bind-app", err: "proxy request POST \"bind-app\" is forbidden"},
{method: "POST", path: "/bind-app", err: "proxy request POST \"bind-app\" is forbidden"},
{method: "GET", path: "/bind-app", expectedPath: "/resources/noflow/bind-app"},
{method: "GET", path: "/resources/noflow/bind-app", expectedPath: "/resources/noflow/bind-app"},
{method: "POST", path: "/resources/noflow/otherpath", expectedPath: "/resources/noflow/otherpath"},
{method: "POST", path: "/resources/otherinstance/otherpath", expectedPath: "/resources/noflow/resources/otherinstance/otherpath"},
}
evt := createEvt(c)
for _, tt := range tests {
request, err := http.NewRequest(tt.method, "", nil)
c.Assert(err, check.IsNil)
recorder := httptest.NewRecorder()
err = ProxyInstance(&sInstance, tt.path, evt, "", recorder, request)
if tt.err == "" {
c.Assert(err, check.IsNil)
c.Assert(recorder.Code, check.Equals, http.StatusNoContent)
c.Assert(remoteReq.URL.Path, check.Equals, tt.expectedPath)
} else {
c.Assert(err, check.ErrorMatches, tt.err)
}
}
}<|fim▁end|> | Teams: []string{s.team.Name},
Apps: []string{"app1", "app2"},
BoundUnits: []Unit{}, |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>def goodSegement1(badList,l,r):
sortedBadList = sorted(badList)
current =sortedBadList[0]
maxVal = 0
for i in range(len(sortedBadList)):
current = sortedBadList[i]
maxIndex = i+1
# first value
if i == 0 and l<=current<=r:
val = current - l
prev = l
print("first index value")
print("prev, current : ",prev,current)
if(val>maxVal):
maxVal = val
print("1. (s,e)",l,current)
# other middle values
elif l<=current<=r:
prev = sortedBadList[i-1]
val = current - prev
print("prev, current : ",prev,current)
if(val>maxVal):
maxVal = val
print("2. (s,e)",prev,current)
# last value
if maxIndex == len(sortedBadList) and l<=current<=r:
print("last index value")
next = r
val = next - current
if(val>maxVal):
maxVal = val<|fim▁hole|>pass
goodSegement1([2,5,8,10,3],1,12)
goodSegement1([37,7,22,15,49,60],3,48)<|fim▁end|> | print("3. (s,e)",current,next)
print("maxVal:",maxVal-1) |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | from __future__ import absolute_import
from .telegram import TelegramService |
<|file_name|>test_aningvtree.py<|end_file_name|><|fim▁begin|>import unittest
from openmdao.main.api import VariableTree, Component, Assembly
from openmdao.main.datatypes.api import Float, VarTree
class VT(VariableTree):<|fim▁hole|>
class C(Component):
x = Float(iotype='in')
out = Float(iotype='out')
def execute(self):
self.out = 2 * self.x
class A(Assembly):
vt = VarTree(VT(), iotype='in')
def configure(self):
self.add('c', C())
self.driver.workflow.add(['c'])
self.connect('vt.x', 'c.x')
self.create_passthrough('c.out')
class TestCase(unittest.TestCase):
def test_vtree(self):
a = A()
a.vt.x = 1.0
a.run()
self.assertEqual(a.out, 2.0)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | x = Float() |
<|file_name|>_mock.py<|end_file_name|><|fim▁begin|>from typing import Dict, List, Optional
from ray.tune.suggest.suggestion import Searcher, ConcurrencyLimiter
from ray.tune.suggest.search_generator import SearchGenerator
from ray.tune.trial import Trial
class _MockSearcher(Searcher):
def __init__(self, **kwargs):
self.live_trials = {}
self.counter = {"result": 0, "complete": 0}
self.final_results = []
self.stall = False
self.results = []
super(_MockSearcher, self).__init__(**kwargs)
def suggest(self, trial_id: str):
if not self.stall:
self.live_trials[trial_id] = 1
return {"test_variable": 2}
return None
def on_trial_result(self, trial_id: str, result: Dict):
self.counter["result"] += 1<|fim▁hole|> self.results += [result]
def on_trial_complete(
self, trial_id: str, result: Optional[Dict] = None, error: bool = False
):
self.counter["complete"] += 1
if result:
self._process_result(result)
if trial_id in self.live_trials:
del self.live_trials[trial_id]
def _process_result(self, result: Dict):
self.final_results += [result]
class _MockSuggestionAlgorithm(SearchGenerator):
def __init__(self, max_concurrent: Optional[int] = None, **kwargs):
self.searcher = _MockSearcher(**kwargs)
if max_concurrent:
self.searcher = ConcurrencyLimiter(
self.searcher, max_concurrent=max_concurrent
)
super(_MockSuggestionAlgorithm, self).__init__(self.searcher)
@property
def live_trials(self) -> List[Trial]:
return self.searcher.live_trials
@property
def results(self) -> List[Dict]:
return self.searcher.results<|fim▁end|> | |
<|file_name|>linear_assignment_.py<|end_file_name|><|fim▁begin|>"""
Solve the unique lowest-cost assignment problem using the
Hungarian algorithm (also known as Munkres algorithm).
"""
# Based on original code by Brain Clapper, adapted to NumPy by Gael Varoquaux.
# Heavily refactored by Lars Buitinck.
#
# TODO: a version of this algorithm has been incorporated in SciPy; use that
# when SciPy 0.17 is released.
# Copyright (c) 2008 Brian M. Clapper <[email protected]>, Gael Varoquaux
# Author: Brian M. Clapper, Gael Varoquaux
# LICENSE: BSD
import numpy as np
def linear_assignment(X):
"""Solve the linear assignment problem using the Hungarian algorithm.
The problem is also known as maximum weight matching in bipartite graphs.
The method is also known as the Munkres or Kuhn-Munkres algorithm.
Parameters
----------
X : array
The cost matrix of the bipartite graph
Returns
-------
indices : array
The pairs of (row, col) indices in the original array giving
the original ordering.
References
----------
1. http://www.public.iastate.edu/~ddoty/HungarianAlgorithm.html
2. Harold W. Kuhn. The Hungarian Method for the assignment problem.
*Naval Research Logistics Quarterly*, 2:83-97, 1955.
3. Harold W. Kuhn. Variants of the Hungarian method for assignment
problems. *Naval Research Logistics Quarterly*, 3: 253-258, 1956.
4. Munkres, J. Algorithms for the Assignment and Transportation Problems.
*Journal of the Society of Industrial and Applied Mathematics*,
5(1):32-38, March, 1957.
5. https://en.wikipedia.org/wiki/Hungarian_algorithm
"""
indices = _hungarian(X).tolist()
indices.sort()
# Re-force dtype to ints in case of empty list
indices = np.array(indices, dtype=int)
# Make sure the array is 2D with 2 columns.
# This is needed when dealing with an empty list
indices.shape = (-1, 2)
return indices
class _HungarianState(object):
"""State of one execution of the Hungarian algorithm.
Parameters
----------
cost_matrix : 2D matrix
The cost matrix. Does not have to be square.
"""
def __init__(self, cost_matrix):
cost_matrix = np.atleast_2d(cost_matrix)
# If there are more rows (n) than columns (m), then the algorithm
# will not be able to work correctly. Therefore, we
# transpose the cost function when needed. Just have to
# remember to swap the result columns back later.
transposed = (cost_matrix.shape[1] < cost_matrix.shape[0])
if transposed:
self.C = (cost_matrix.T).copy()
else:
self.C = cost_matrix.copy()
self.transposed = transposed
# At this point, m >= n.
n, m = self.C.shape
self.row_uncovered = np.ones(n, dtype=np.bool)
self.col_uncovered = np.ones(m, dtype=np.bool)
self.Z0_r = 0
self.Z0_c = 0
self.path = np.zeros((n + m, 2), dtype=int)
self.marked = np.zeros((n, m), dtype=int)
def _find_prime_in_row(self, row):
"""
Find the first prime element in the specified row. Returns
the column index, or -1 if no starred element was found.
"""
col = np.argmax(self.marked[row] == 2)
if self.marked[row, col] != 2:
col = -1
return col
def _clear_covers(self):
"""Clear all covered matrix cells"""
self.row_uncovered[:] = True
self.col_uncovered[:] = True
def _hungarian(cost_matrix):
"""The Hungarian algorithm.
Calculate the Munkres solution to the classical assignment problem and
return the indices for the lowest-cost pairings.
Parameters<|fim▁hole|> ----------
cost_matrix : 2D matrix
The cost matrix. Does not have to be square.
Returns
-------
indices : 2D array of indices
The pairs of (row, col) indices in the original array giving
the original ordering.
"""
state = _HungarianState(cost_matrix)
# No need to bother with assignments if one of the dimensions
# of the cost matrix is zero-length.
step = None if 0 in cost_matrix.shape else _step1
while step is not None:
step = step(state)
# Look for the starred columns
results = np.array(np.where(state.marked == 1)).T
# We need to swap the columns because we originally
# did a transpose on the input cost matrix.
if state.transposed:
results = results[:, ::-1]
return results
# Individual steps of the algorithm follow, as a state machine: they return
# the next step to be taken (function to be called), if any.
def _step1(state):
"""Steps 1 and 2 in the Wikipedia page."""
# Step1: For each row of the matrix, find the smallest element and
# subtract it from every element in its row.
state.C -= state.C.min(axis=1)[:, np.newaxis]
# Step2: Find a zero (Z) in the resulting matrix. If there is no
# starred zero in its row or column, star Z. Repeat for each element
# in the matrix.
for i, j in zip(*np.where(state.C == 0)):
if state.col_uncovered[j] and state.row_uncovered[i]:
state.marked[i, j] = 1
state.col_uncovered[j] = False
state.row_uncovered[i] = False
state._clear_covers()
return _step3
def _step3(state):
"""
Cover each column containing a starred zero. If n columns are covered,
the starred zeros describe a complete set of unique assignments.
In this case, Go to DONE, otherwise, Go to Step 4.
"""
marked = (state.marked == 1)
state.col_uncovered[np.any(marked, axis=0)] = False
if marked.sum() < state.C.shape[0]:
return _step4
def _step4(state):
"""
Find a noncovered zero and prime it. If there is no starred zero
in the row containing this primed zero, Go to Step 5. Otherwise,
cover this row and uncover the column containing the starred
zero. Continue in this manner until there are no uncovered zeros
left. Save the smallest uncovered value and Go to Step 6.
"""
# We convert to int as numpy operations are faster on int
C = (state.C == 0).astype(np.int)
covered_C = C * state.row_uncovered[:, np.newaxis]
covered_C *= state.col_uncovered.astype(dtype=np.int, copy=False)
n = state.C.shape[0]
m = state.C.shape[1]
while True:
# Find an uncovered zero
row, col = np.unravel_index(np.argmax(covered_C), (n, m))
if covered_C[row, col] == 0:
return _step6
else:
state.marked[row, col] = 2
# Find the first starred element in the row
star_col = np.argmax(state.marked[row] == 1)
if not state.marked[row, star_col] == 1:
# Could not find one
state.Z0_r = row
state.Z0_c = col
return _step5
else:
col = star_col
state.row_uncovered[row] = False
state.col_uncovered[col] = True
covered_C[:, col] = C[:, col] * (
state.row_uncovered.astype(dtype=np.int, copy=False))
covered_C[row] = 0
def _step5(state):
"""
Construct a series of alternating primed and starred zeros as follows.
Let Z0 represent the uncovered primed zero found in Step 4.
Let Z1 denote the starred zero in the column of Z0 (if any).
Let Z2 denote the primed zero in the row of Z1 (there will always be one).
Continue until the series terminates at a primed zero that has no starred
zero in its column. Unstar each starred zero of the series, star each
primed zero of the series, erase all primes and uncover every line in the
matrix. Return to Step 3
"""
count = 0
path = state.path
path[count, 0] = state.Z0_r
path[count, 1] = state.Z0_c
while True:
# Find the first starred element in the col defined by
# the path.
row = np.argmax(state.marked[:, path[count, 1]] == 1)
if not state.marked[row, path[count, 1]] == 1:
# Could not find one
break
else:
count += 1
path[count, 0] = row
path[count, 1] = path[count - 1, 1]
# Find the first prime element in the row defined by the
# first path step
col = np.argmax(state.marked[path[count, 0]] == 2)
if state.marked[row, col] != 2:
col = -1
count += 1
path[count, 0] = path[count - 1, 0]
path[count, 1] = col
# Convert paths
for i in range(count + 1):
if state.marked[path[i, 0], path[i, 1]] == 1:
state.marked[path[i, 0], path[i, 1]] = 0
else:
state.marked[path[i, 0], path[i, 1]] = 1
state._clear_covers()
# Erase all prime markings
state.marked[state.marked == 2] = 0
return _step3
def _step6(state):
"""
Add the value found in Step 4 to every element of each covered row,
and subtract it from every element of each uncovered column.
Return to Step 4 without altering any stars, primes, or covered lines.
"""
# the smallest uncovered value in the matrix
if np.any(state.row_uncovered) and np.any(state.col_uncovered):
minval = np.min(state.C[state.row_uncovered], axis=0)
minval = np.min(minval[state.col_uncovered])
state.C[np.logical_not(state.row_uncovered)] += minval
state.C[:, state.col_uncovered] -= minval
return _step4<|fim▁end|> | |
<|file_name|>cache.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Sachi King
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
import math
import os
import re
#from oslo_config import cfg
from smashcache.cache import filler
from smashcache.pages import errors
opts = [
cfg.StrOpt('chunk_storage_path',
default='/tmp/smashcache',
help="Location to download chunked target data"),
cfg.IntOpt('chunk_size',
default=8,
help="Size in megabytes to chunk at a time"),
cfg.StrOpt('proxy_host_url',
help="URL to remote host")
]
UNITS_Ki = 1024
UNITS_Mi = 1024 ** 2
#CONF = cfg.CONF
#CONF.register_opts(opts)
#CONF(project='smashcache', default_config_files=None)
# Yes commenting out in git... :(
# uwsgi, which I wanted to use, freaked out with oslo config, so fake it
class conf(object):
chunk_storage_path = '/tmp/smashcache'
chunk_size = 8
proxy_host_url = 'http://s3.amz.com/place'
CONF = conf
CHUNKSIZE = CONF.chunk_size * UNITS_Mi
class CacheObject(object):
"""Storage of known objects"""
path_file_re = re.compile('^\/(.+/)?(.+\..+)$')
def __init__(self, object_uri):
if not isinstance(object_uri, str):
raise errors.error500()
r = self.path_file_re.match(object_uri)<|fim▁hole|> object_name = r.group(2)
else:
print("Invalid file name %s" % object_uri)
raise errors.error404()
self.origin_url = (CONF.proxy_host_url + object_uri)
self._headerValues()
self._ensurePathsExist(object_path)
self.stored_object_path = ("%s/%s/%s" % (CONF.chunk_storage_path,
object_path, object_name))
self.total_chunks = math.ceil(self.object_size / CHUNKSIZE)
self.last_chunk_size = (self.object_size -
(self.total_chunks - 1) * CHUNKSIZE)
self.chunks = []
self.chunk_load = []
for _ in range(self.total_chunks):
self.chunks.append(False)
self.chunk_load.append(False)
def _ensurePathsExist(self, object_path):
# TODO: Directory transversal
paths = ['']
if object_path != '':
paths.extend(object_path.strip('/').split('/'))
path = CONF.chunk_storage_path
for p in paths:
path = ("%s/%s" % (path, p))
if not os.path.exists(path):
os.makedirs(path)
def _headerValues(self):
upstream_headers = filler.getHeaders(self.origin_url)
self.object_size = int(upstream_headers.get('content-length'))
self.content_type = upstream_headers.get('content-type')
if not self.object_size:
raise errors.error502()
def getRangeIterable(self, byte_start, byte_end):
initial_chunk = math.floor(byte_start / CHUNKSIZE)
current_chunk = initial_chunk
start_offset = byte_start - initial_chunk * CHUNKSIZE
total_bytes = byte_end - byte_start
remaining_bytes = total_bytes
max_read_bytes = 256 * UNITS_Ki
bytes_to_read = max_read_bytes
while True:
if remaining_bytes == 0:
break
self.getOrWaitChunk(current_chunk)
with open(self._chunk_path(current_chunk), 'rb') as f:
if current_chunk == initial_chunk:
f.seek(start_offset)
while True:
if remaining_bytes < max_read_bytes:
bytes_to_read = remaining_bytes
read_bytes = f.read(bytes_to_read)
remaining_bytes -= len(read_bytes)
yield read_bytes
if len(read_bytes) != max_read_bytes:
current_chunk += 1
break
def getOrWaitChunk(self, chunk_number):
if not self.chunks[chunk_number] and not self.chunk_load[chunk_number]:
self.chunk_load[chunk_number] = True
self._fetchChunk(chunk_number)
self.chunks[chunk_number] = True
elif self.chunks[chunk_number]:
pass
elif self.chunk_load[chunk_number]:
while not self.chunks[chunk_number]:
eventlet.sleep()
else:
raise errors.error500()
def _fetchChunk(self, chunk_number):
byte_range = (chunk_number * CHUNKSIZE,
(chunk_number + 1) * CHUNKSIZE - 1)
if self._validChunkExists(chunk_number):
return
filler.fetchRangeToFile(self.origin_url, byte_range,
self._chunk_path(chunk_number))
def _validChunkExists(self, chunk_number):
chunk_path = self._chunk_path(chunk_number)
expected_size = CHUNKSIZE
if chunk_number == self.total_chunks - 1:
expected_size = self.last_chunk_size
return (os.path.isfile(chunk_path) and
os.path.getsize(chunk_path) == expected_size)
def _chunk_path(self, chunk_number):
return ("%s.%s" % (self.stored_object_path, chunk_number))
class Cache(object):
def __init__(self):
self.objects = {}
def headers(self, uri):
if uri not in self.objects.keys():
self.objects[uri] = CacheObject(uri)
return [('Content-Type', self.objects[uri].content_type)]
def headersContentLength(self, uri):
if uri not in self.objects.keys():
self.objects[uri] = CacheObject(uri)
return [('Content-Length', str(self.objects[uri].object_size))]
def getIterator(self, uri, headers, start=0, end=None):
if uri not in self.objects.keys():
self.objects[uri] = CacheObject(uri)
if not end or end > self.objects[uri].object_size:
end = self.objects[uri].object_size
if start > end:
raise errors.error400()
if start == 0 and end == self.objects[uri].object_size:
content_length = self.objects[uri].object_size
else:
# Sigh, so because 0 is "send first byte" and there are 20 bytes
# we're sending bytes 0-19. If we tell chrome we are sending
# 0-20, that's 21 bytes and chrome freaks and sends a load of RST
#
# Todo: Look into how I handle send bytes and try to make this
# less of a case-by-case modification to headers. It's confusing
headers.extend([('Content-Range', ("bytes %s-%s/%s" %
(start, end-1, self.objects[uri].object_size)))])
content_length = end - start
headers.extend([('Content-Length', str(content_length))])
return self.objects[uri].getRangeIterable(start, end)<|fim▁end|> | if r:
object_path = "" if r.group(1) is None else r.group(1) |
<|file_name|>BeachAccess.js<|end_file_name|><|fim▁begin|>import React from 'react';
import pure from 'recompose/pure';
import SvgIcon from 'material-ui/SvgIcon';
let BeachAccess = props =>
<SvgIcon {...props}>
<path d="M13.127 14.56l1.43-1.43 6.44 6.443L19.57 21zm4.293-5.73l2.86-2.86c-3.95-3.95-10.35-3.96-14.3-.02 3.93-1.3 8.31-.25 11.44 2.88zM5.95 5.98c-3.94 3.95-3.93 10.35.02 14.3l2.86-2.86C5.7 14.29 4.65 9.91 5.95 5.98zm.02-.02l-.01.01c-.38 3.01 1.17 6.88 4.3 10.02l5.73-5.73c-3.13-3.13-7.01-4.68-10.02-4.3z" />
</SvgIcon>;<|fim▁hole|>BeachAccess.muiName = 'SvgIcon';
export default BeachAccess;<|fim▁end|> |
BeachAccess = pure(BeachAccess); |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.