prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>minus.js<|end_file_name|><|fim▁begin|>"use strict";
Object.defineProperty(exports, "__esModule", {<|fim▁hole|> value: true
});
var minus = exports.minus = { "viewBox": "0 0 20 20", "children": [{ "name": "path", "attribs": { "d": "M16,10c0,0.553-0.048,1-0.601,1H4.601C4.049,11,4,10.553,4,10c0-0.553,0.049-1,0.601-1h10.799C15.952,9,16,9.447,16,10z" } }] };<|fim▁end|> | |
<|file_name|>regex_matcherator_naturectr.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
patterns = [r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_center)/?.*?/(\d{9}(.*?))\.(.*?)$',<|fim▁hole|>
strings = ["/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470409.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470408_1.jpg",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_anchor/346470407_alt01.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bc_jpg_makerDrop/crop_fullsize_pad_center/346470406_1.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/346880405.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470404_1.jpg",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_center/346470403.png",
"/mnt/Post_Complete/Complete_to_Load/nature_center/bfly_jpg_makerDrop/crop_fullsize_anchor/336470402.jpg"]*10
def matches_pattern(str, patterns):
for pattern in patterns:
if pattern.match(str):
return pattern.match(str), pattern
return False
def regex_matcherator(strings,patterns):
import re
compiled_patterns = list(map(re.compile, patterns))
for s in strings:
if matches_pattern(s, compiled_patterns):
print matches_pattern(s, compiled_patterns)[1].pattern
print '--'.join(s.split('/')[-2:])
print matches_pattern(s, compiled_patterns)[0].groups()
print '\n'
r = regex_matcherator(strings,patterns)
#print r.next()<|fim▁end|> | r'^.*?/bc_jpg_makerDrop/(crop_fullsize_pad_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$',
r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_center)/?.*?/(\d{9}(.*?))\.(.*?)$',
r'^.*?/bfly_jpg_makerDrop/(crop_fullsize_anchor)/?.*?/(\d{9}(.*?))\.(.*?)$']*10 |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>pub struct WordProblem {
text: String,
}
impl WordProblem {
pub fn new(command: &str) -> Self {
WordProblem {
text: command.to_string(),
}
}
pub fn answer(&self) -> Result<i32,()> {
if !self.text.starts_with("What") {
return Err(());
}
let mut temp = self.text[8..self.text.len()-1]
.split_whitespace().map(|s| s.to_string()).collect::<Vec<_>>();
temp.reverse();
let mut sum = temp.pop().unwrap().parse::<i32>().unwrap();
let mut operation = String::from("");
while let Some(x) = temp.pop() {
match x.as_str() {
"plus" | "minus" | "divided" | "multiplied" => {
operation = x.to_string();
continue;
},
"by" => continue,
&_ => {}
}
match x.parse::<i32>() {
Ok(d) => match operation.as_str() {
"plus" => sum += d,
"minus" => sum -= d,
"divided" => sum /= d,
"multiplied" => sum *= d,
&_ => {}
},
_ => return Err(()),
}
}
<|fim▁hole|>}<|fim▁end|> | Ok(sum)
} |
<|file_name|>RandomGroupIdAdapter.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2012 Sevket Seref Arikan, David Ingram
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package uk.ac.ucl.chime.web;
import gov.nih.nci.cagrid.gums.client.GetGridProxy;
import java.util.ArrayList;
import javax.el.ELContext;
import javax.faces.context.FacesContext;
import javax.faces.model.SelectItem;
import org.apache.poi.hssf.record.formula.Ptg;
import uk.ac.ucl.chime.utilities.TextValueInfo;
import uk.ac.ucl.chime.utils.RMDataTypeAdapter;
import uk.ac.ucl.chime.wrappers.archetypewrappers.ArchetypeWrapper;
/*
* This class descends from RMDataTypeAdapter to use its syntax resolving mechanism
* it is not an adapter for a data type operation, instead it provides access to a groupId
* using the node path as key. so same nodeId from a set of components gets back a random guid
* everytime the request level bean is initialized. this is not a nice trick, but JSF does not leave much choice in this case.
*
*/
public class RandomGroupIdAdapter extends RMDataTypeAdapter {
public RandomGroupIdAdapter(ArchetypeWrapper pArchetypeWrapper) {
archetypeWrapper = pArchetypeWrapper;
}
@Override<|fim▁hole|> }
@Override
protected void setValue(String nodePath, Object value) throws Exception {
//simply ignore set value
}
}<|fim▁end|> | protected Object getValue() throws Exception {
ELContext elContext = FacesContext.getCurrentInstance().getELContext();
ConnectorBean connector = (ConnectorBean) FacesContext.getCurrentInstance().getApplication().getELResolver().getValue(elContext, null, "connectorBean");
return connector.getGroupGUID(targetNodePath); |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/**
* React Static Boilerplate
* https://github.com/koistya/react-static-boilerplate
* Copyright (c) Konstantin Tarkus (@koistya) | MIT license
*/
import './index.scss'
import React, { Component } from 'react'
// import { Grid, Col, Row } from 'react-bootstrap';
export default class IndexPage extends Component {
render() {
return (
<div className="top-page">
<div>
<img
className="top-image"
src="/cover2.jpg"
width="100%"
alt="cover image"
/>
</div>
<div className="top-page--footer">
The source code of this website is available
<a
href="https://github.com/odoruinu/odoruinu.net-pug"
target="_blank"
rel="noopener noreferrer"<|fim▁hole|> </div>
</div>
)
}
}<|fim▁end|> | >
here on GitHub
</a>
. |
<|file_name|>TeamDao.java<|end_file_name|><|fim▁begin|>/**********************************************************************
Copyright (c) 2009 Stefan Seelmann. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**********************************************************************/
package com.example.dao;
import java.util.Collection;
import com.example.Team;
public class TeamDao extends AbstractDao<Team>
{
public Collection<Team> findByName( String name )
{
return super.findByQuery( "name.startsWith(s1)", "java.lang.String s1", name );<|fim▁hole|>
public Team loadWithUsers( Object id )
{
return super.load( id, "users" );
}
public Team load( Object id )
{
return super.load( id );
}
public Collection<Team> loadAll()
{
return super.loadAll();
}
}<|fim▁end|> | }
|
<|file_name|>basic-information.js<|end_file_name|><|fim▁begin|>var xue =xue || {};
xue.formCheck = xue.formCheck || {};
var fCheck = xue.formCheck;
/* 提示信息的css样式 */
fCheck.setTips = function(select, tips){
$(select).css({
'display': 'block',
}).html(tips);
};
/* 输入正确时,清除提醒 */
fCheck.clearTips = function(select){
$(select).css({
'display':'none'
}).html(null);
};
/* 边框样式 */
fCheck.bordercss = function(argument) {
if($(argument).val() !== ''){
$(argument).css('border','1px solid #68c04a');
}else{$(argument).css('border','1px solid #d2d2d2');}
}
/* 验证昵称 */
$(function(){
var nickname = $('.nickname');
$(nickname).on('focus',function(){
nickname.data('lastVal', $.trim(nickname.val()));
$('.prompt-empty').html('请输入不超过6个汉字、18个字母或18个数字').css({
color: '#999',
display: 'block'
});
$(".nickname-warning").css({
display: 'none',
});
});
$(nickname).on('blur',function(){
fCheck.clearTips(".prompt-empty");
if (nickname.val() == '') {
$(".nickname-warning").html('请输入昵称').css({
display: 'block',
});
}else{
if(nickname.data('lastVal') != $.trim(nickname.val())) {
$(".nickname").css('border','1px solid #d2d2d2');
fCheck.clearTips(".nickname-warning");
$.fn.nickname();
}else{
$(".nickname-warning").css({
display: 'block',
});
}
}
});
});
var boxs = {
nickname: '.nickname',
school:'.school'
}
$.fn.nickname = function(){
var box = $(boxs.nickname),
val = box.val();
if (val == '') {<|fim▁hole|> fCheck.setTips(".nickname-warning",'请输入昵称');
}else {
var reg = /^[0-9a-zA-Z\u4e00-\u9fa5]{1,18}$/;
if(reg.test(val)){
$.fn.nicknameajax();
}else{
fCheck.setTips(".nickname-warning",'只能输入数字、汉字和字母');
}
}
};
$.fn.nicknameajax = function(){
var box = $(boxs.nickname),
val = box.val(),
d_val = $.trim(box.data('lastVal'));
if($.trim(val) != d_val){
$.ajax({
url : '/MyInfos/getNicknameUseful',
type : 'GET',
dataType : 'json',
data : 'nickname=' + $('.nickname').val(),
timeout: 7000,
async: true,
success : function(result){
if(result.sign == false){
fCheck.setTips(".nickname-warning",result.msg);
return false;
} else {
fCheck.clearTips(".nickname-warning");
fCheck.bordercss('.nickname');
$(box).data('lastVal',val);
return true;
}
if(result.sign === 2){
window.location.href = result.msg;
}
}
});
}
}
/* 学校格式验证 */
$.fn.school = function(){
var box = $(boxs.school),
val = box.val();
var text = box.next('.school-warning'),
block = text.addClass('success');
if (val == '') {
fCheck.clearTips(".school-warning");
}else {
var reg = /^[0-9a-zA-Z\u4e00-\u9fa5]{1,50}$/;
if(reg.test(val)){
fCheck.clearTips(".school-warning");
fCheck.bordercss('.school');
}else{
fCheck.setTips(".school-warning",'只能输入数字、汉字和字母');
$('.school').css('border','1px solid #d2d2d2');
return false;
}
}
};
$('.school').on('blur',function(){
$.fn.school();
});
/* 点击提交按钮验证 */
function inforCheckform () {
if ($(".nickname").val() == $(".nickname").data("nickname") && $(".school").val() == $(".school").data("school") && $("#year").find("option:selected").text() == $("#year").attr("rel") && $("#month").find("option:selected").text() == $("#month").attr("rel") && $("#day").find("option:selected").text() == $("#day").attr("rel")) {
alert('您没有修改或新增任何资料');
return false;
}else{
setTimeout($.fn.nickname(),200);
$.fn.school();
}
if ($('.nickname-warning').is(":empty") && $('.school-warning').is(":empty") && $('.date-warning').is(":empty")) {
}else{
return false;
};
}
var messageError = $(".message-error span").is(":empty");
if (messageError == '0') {
$('.message-error').css({
display: 'block'
});
setTimeout("$('.message-error').css({display: 'none'});",6000);
}<|fim▁end|> | |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import os
import sys
# Import the Flask Framework
from flask import Flask, request, render_template
isDev = os.environ["SERVER_SOFTWARE"].find('Development') == 0
app = Flask( __name__ )
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def home():
return render_template( 'index.html', isDev=isDev )
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""<|fim▁hole|> return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500<|fim▁end|> | |
<|file_name|>hostenv_spidermonkey.js<|end_file_name|><|fim▁begin|>version https://git-lfs.github.com/spec/v1
oid sha256:42fcecf8fdabe110af986ac81bb56b598f5a3fa59c6d0c4cc8b80daa2dca0473<|fim▁hole|><|fim▁end|> | size 1121 |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2009, James McCoy
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
import supybot.conf as conf
import supybot.ircdb as ircdb
import supybot.ircmsgs as ircmsgs
class ChannelTestCase(ChannelPluginTestCase):
plugins = ('Channel', 'User')
def setUp(self):
super(ChannelTestCase, self).setUp()
self.irc.state.channels[self.channel].addUser('foo')
self.irc.state.channels[self.channel].addUser('bar')
def testLobotomies(self):
self.assertRegexp('lobotomy list', 'not.*any')
## def testCapabilities(self):
## self.prefix = 'foo!bar@baz'
## self.irc.feedMsg(ircmsgs.privmsg(self.irc.nick, 'register foo bar',
## prefix=self.prefix))
## u = ircdb.users.getUser(0)
## u.addCapability('%s.op' % self.channel)
## ircdb.users.setUser(u)
## self.assertNotError(' ')
## self.assertResponse('user capabilities foo', '[]')
## self.assertNotError('channel addcapability foo op')
## self.assertRegexp('channel capabilities foo', 'op')
## self.assertNotError('channel removecapability foo op')
## self.assertResponse('user capabilities foo', '[]')
def testCapabilities(self):
self.assertNotError('channel capability list')
self.assertNotError('channel capability set -foo')
self.assertNotError('channel capability unset -foo')
self.assertError('channel capability unset -foo')
self.assertNotError('channel capability set -foo bar baz')
self.assertRegexp('channel capability list', 'baz')
self.assertNotError('channel capability unset -foo baz')
self.assertError('channel capability unset baz')
def testEnableDisable(self):
self.assertNotRegexp('channel capability list', '-Channel')
self.assertError('channel enable channel')
self.assertNotError('channel disable channel')
self.assertRegexp('channel capability list', '-Channel')
self.assertNotError('channel enable channel')
self.assertNotRegexp('channel capability list', '-Channel')
self.assertNotError('channel disable channel nicks')
self.assertRegexp('channel capability list', '-Channel.nicks')
self.assertNotError('channel enable channel nicks')
self.assertNotRegexp('channel capability list', '-Channel.nicks')
self.assertNotRegexp('channel capability list', 'nicks')
self.assertNotError('channel disable nicks')
self.assertRegexp('channel capability list', 'nicks')
self.assertNotError('channel enable nicks')
self.assertError('channel disable invalidPlugin')
self.assertError('channel disable channel invalidCommand')
def testUnban(self):
self.assertError('unban foo!bar@baz')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
m = self.getMsg('unban foo!bar@baz')
self.assertEqual(m.command, 'MODE')
self.assertEqual(m.args, (self.channel, '-b', 'foo!bar@baz'))
self.assertNoResponse(' ', 2)
def testErrorsWithoutOps(self):
for s in 'op deop halfop dehalfop voice devoice kick invite'.split():
self.assertError('%s foo' % s)
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('%s foo' % s)
self.irc.feedMsg(ircmsgs.deop(self.channel, self.nick))
def testWontDeItself(self):
for s in 'deop dehalfop'.split():
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertError('%s %s' % (s, self.nick))
def testCanDevoiceSelf(self):
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('devoice %s' % self.nick)
def testOp(self):
self.assertError('op')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('op')
m = self.getMsg('op foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'foo'))
m = self.getMsg('op foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+o', 'bar'))
self.irc.state.supported['MODES'] = 2
m = self.getMsg('op foo bar')
try:
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+oo', 'foo', 'bar'))
finally:
self.irc.state.supported['MODES'] = 1
def testHalfOp(self):
self.assertError('halfop')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('halfop')
m = self.getMsg('halfop foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'foo'))
m = self.getMsg('halfop foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+h', 'bar'))
def testVoice(self):
self.assertError('voice')
self.irc.feedMsg(ircmsgs.op(self.channel, self.nick))
self.assertNotError('voice')
m = self.getMsg('voice foo')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'foo'))
m = self.getMsg('voice foo bar')
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'foo'))
m = self.irc.takeMsg()
self.failUnless(m.command == 'MODE' and
m.args == (self.channel, '+v', 'bar'))
def assertKban(self, query, hostmask, **kwargs):
m = self.getMsg(query, **kwargs)
self.assertEqual(m, ircmsgs.ban(self.channel, hostmask))
m = self.getMsg(' ')
self.assertEqual(m.command, 'KICK')
def assertBan(self, query, hostmask, **kwargs):
m = self.getMsg(query, **kwargs)
self.assertEqual(m, ircmsgs.ban(self.channel, hostmask))
def testIban(self):
self.irc.feedMsg(ircmsgs.join(self.channel,
prefix='[email protected]'))
self.assertError('iban foo!bar@baz')<|fim▁hole|> self.assertBan('iban foo!bar@baz', 'foo!bar@baz')
self.assertBan('iban foobar', '[email protected]')
conf.supybot.protocols.irc.strictRfc.setValue(True)
self.assertError('iban $a:nyuszika7h')
self.assertError('unban $a:nyuszika7h')
conf.supybot.protocols.irc.strictRfc.setValue(False)
self.assertBan('iban $a:nyuszika7h', '$a:nyuszika7h')
self.assertNotError('unban $a:nyuszika7h')
## def testKban(self):
## self.irc.prefix = '[email protected]'
## self.irc.nick = 'something'
## self.irc.feedMsg(ircmsgs.join(self.channel,
## prefix='[email protected]'))
## self.assertError('kban foobar')
## self.irc.feedMsg(ircmsgs.op(self.channel, self.irc.nick))
## self.assertError('kban foobar -1')
## self.assertKban('kban foobar', '*!*@*.domain.tld')
## self.assertKban('kban --exact foobar', '[email protected]')
## self.assertKban('kban --host foobar', '*!*@host.domain.tld')
## self.assertKban('kban --user foobar', '*!user@*')
## self.assertKban('kban --nick foobar', 'foobar!*@*')
## self.assertKban('kban --nick --user foobar', 'foobar!user@*')
## self.assertKban('kban --nick --host foobar',
## 'foobar!*@host.domain.tld')
## self.assertKban('kban --user --host foobar', '*[email protected]')
## self.assertKban('kban --nick --user --host foobar',
## '[email protected]')
## self.assertNotRegexp('kban adlkfajsdlfkjsd', 'KeyError')
## self.assertNotRegexp('kban foobar time', 'ValueError')
## self.assertError('kban %s' % self.irc.nick)
def testBan(self):
with conf.supybot.protocols.irc.banmask.context(['exact']):
self.assertNotError('ban add foo!bar@baz')
self.assertNotError('ban remove foo!bar@baz')
orig = conf.supybot.protocols.irc.strictRfc()
with conf.supybot.protocols.irc.strictRfc.context(True):
# something wonky is going on here. irc.error (src/Channel.py|449)
# is being called but the assert is failing
self.assertError('ban add not!a.hostmask')
self.assertNotRegexp('ban add not!a.hostmask', 'KeyError')
self.assertError('ban add $a:nyuszika7h')
self.assertError('ban remove $a:nyuszika7h')
conf.supybot.protocols.irc.strictRfc.setValue(False)
self.assertNotError('ban add $a:nyuszika7h')
self.assertNotError('ban remove $a:nyuszika7h')
def testBanList(self):
self.assertNotError('ban add foo!bar@baz')
self.assertNotError('ban add foobar!*@baz')
self.assertNotError('ban add foobar!qux@baz')
self.assertRegexp('ban list', r'.*foo!bar@baz.*')
self.assertRegexp('ban list', r'.*foobar!\*@baz.*')
self.assertRegexp('ban list', r'.*foobar!qux@baz.*')
self.assertNotRegexp('ban list foobar!*@baz', r'.*foo!bar@baz.*')
self.assertRegexp('ban list foobar!*@baz', r'.*foobar!\*@baz.*')
self.assertRegexp('ban list foobar!*@baz', r'.*foobar!qux@baz.*')
self.assertResponse('ban list foobar!\*@baz',
'"foobar!*@baz" (never expires)')
def testIgnore(self):
orig = conf.supybot.protocols.irc.banmask()
def ignore(given, expect=None):
if expect is None:
expect = given
self.assertNotError('channel ignore add %s' % given)
self.assertResponse('channel ignore list', "'%s'" % expect)
self.assertNotError('channel ignore remove %s' % expect)
self.assertRegexp('channel ignore list', 'not currently')
ignore('foo!bar@baz', '*!*@baz')
ignore('foo!*@*')
with conf.supybot.protocols.irc.banmask.context(['exact']):
ignore('foo!bar@baz')
ignore('foo!*@*')
self.assertError('ban add not!a.hostmask')
def testNicks(self):
self.assertResponse('channel nicks', 'bar, foo, and test')
self.assertResponse('channel nicks --count', '3')
def testPart(self):
def getAfterJoinMessages():
m = self.irc.takeMsg()
self.assertEqual(m.command, 'MODE')
m = self.irc.takeMsg()
self.assertEqual(m.command, 'MODE')
m = self.irc.takeMsg()
self.assertEqual(m.command, 'WHO')
self.assertError('part #foo')
self.assertRegexp('part #foo', 'not in')
self.irc.feedMsg(ircmsgs.join('#foo', prefix=self.prefix))
getAfterJoinMessages()
m = self.getMsg('part #foo')
self.assertEqual(m.command, 'PART')
self.irc.feedMsg(ircmsgs.join('#foo', prefix=self.prefix))
getAfterJoinMessages()
m = self.getMsg('part #foo reason')
self.assertEqual(m.command, 'PART')
self.assertEqual(m.args[0], '#foo')
self.assertEqual(m.args[1], 'reason')
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:<|fim▁end|> | self.irc.feedMsg(ircmsgs.op(self.channel, self.irc.nick)) |
<|file_name|>htmltablerowelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::HTMLTableRowElementBinding;
use dom::bindings::utils::{DOMString, ErrorResult};
use dom::document::AbstractDocument;
use dom::element::HTMLTableRowElementTypeId;<|fim▁hole|>use dom::htmlelement::HTMLElement;
use dom::node::{AbstractNode, Node};
pub struct HTMLTableRowElement {
htmlelement: HTMLElement,
}
impl HTMLTableRowElement {
pub fn new_inherited(localName: ~str, document: AbstractDocument) -> HTMLTableRowElement {
HTMLTableRowElement {
htmlelement: HTMLElement::new_inherited(HTMLTableRowElementTypeId, localName, document)
}
}
pub fn new(localName: ~str, document: AbstractDocument) -> AbstractNode {
let element = HTMLTableRowElement::new_inherited(localName, document);
Node::reflect_node(@mut element, document, HTMLTableRowElementBinding::Wrap)
}
}
impl HTMLTableRowElement {
pub fn RowIndex(&self) -> i32 {
0
}
pub fn GetRowIndex(&self) -> i32 {
0
}
pub fn SectionRowIndex(&self) -> i32 {
0
}
pub fn GetSectionRowIndex(&self) -> i32 {
0
}
pub fn DeleteCell(&mut self, _index: i32) -> ErrorResult {
Ok(())
}
pub fn Align(&self) -> DOMString {
~""
}
pub fn SetAlign(&self, _align: DOMString) -> ErrorResult {
Ok(())
}
pub fn Ch(&self) -> DOMString {
~""
}
pub fn SetCh(&self, _ch: DOMString) -> ErrorResult {
Ok(())
}
pub fn ChOff(&self) -> DOMString {
~""
}
pub fn SetChOff(&self, _ch_off: DOMString) -> ErrorResult {
Ok(())
}
pub fn VAlign(&self) -> DOMString {
~""
}
pub fn SetVAlign(&self, _v_align: DOMString) -> ErrorResult {
Ok(())
}
pub fn BgColor(&self) -> DOMString {
~""
}
pub fn SetBgColor(&self, _bg_color: DOMString) -> ErrorResult {
Ok(())
}
}<|fim▁end|> | |
<|file_name|>impl_method_type_after.rs<|end_file_name|><|fim▁begin|>pub struct FooBar;
<|fim▁hole|>trait T {
fn foo(self, f: &mut FooBar);
}
impl T for S {
fn foo(self, f: FooBar)
}<|fim▁end|> | struct S;
|
<|file_name|>test_nutnr_m_glider.py<|end_file_name|><|fim▁begin|>"""
@package mi.dataset.parser.test
@file mi/dataset/parser/test/test_nutnr_m_glider.py
@author Emily Hahn
@brief A test parser for the nutnr series m instrument through a glider
"""
import os
from nose.plugins.attrib import attr
from mi.core.exceptions import SampleException, ConfigurationException, DatasetParserException
from mi.core.log import get_logger
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.driver.nutnr_m.glider.resource import RESOURCE_PATH
from mi.dataset.parser.glider import GliderParser
from mi.dataset.test.test_parser import ParserUnitTestCase
__author__ = 'Emily Hahn'
__license__ = 'Apache 2.0'
log = get_logger()
@attr('UNIT', group='mi')
class NutnrMGliderParserUnitTestCase(ParserUnitTestCase):
config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.glider',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'NutnrMDataParticle'
}
def test_simple(self):
"""
Test a simple case that we can parse a single message
"""
with open(os.path.join(RESOURCE_PATH, 'single.mrg'), 'rU') as file_handle:
parser = GliderParser(self.config, file_handle, self.exception_callback)
particles = parser.get_records(1)
self.assert_particles(particles, "single.yml", RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_many(self):
"""
Test a simple case with more messages
"""
with open(os.path.join(RESOURCE_PATH, 'many.mrg'), 'rU') as file_handle:
parser = GliderParser(self.config, file_handle, self.exception_callback)
particles = parser.get_records(12)
# requested more than are available in file, should only be 10
self.assertEquals(len(particles), 10)
self.assert_particles(particles, "many.yml", RESOURCE_PATH)
self.assertEqual(self.exception_callback_value, [])
def test_full(self):
"""
Test a full file and confirm the right number of particles is returned
"""
with open(os.path.join(RESOURCE_PATH, 'unit_514-2014-351-2-0.mrg'), 'rU') as file_handle:
parser = GliderParser(self.config, file_handle, self.exception_callback)
particles = parser.get_records(40)
# requested more than are available in file, should only be 10
self.assertEquals(len(particles), 31)
self.assertEqual(self.exception_callback_value, [])
def test_empty(self):
"""
An empty file will return a sample exception since it cannot read the header
"""
file_handle = open(os.path.join(RESOURCE_PATH, 'empty.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
particles = parser.get_records(1)
# requested more than are available in file, should only be 10
self.assertEquals(len(particles), 0)
def test_bad_config(self):
"""
Test that a set of bad configurations produces the expected exceptions
"""
file_handle = open(os.path.join(RESOURCE_PATH, 'single.mrg'), 'rU')
# confirm a configuration exception occurs if no config is passed in
with self.assertRaises(ConfigurationException):
GliderParser({}, file_handle, self.exception_callback)
# confirm a config missing the particle class causes an exception
bad_config = {DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.glider'}
with self.assertRaises(ConfigurationException):
GliderParser(bad_config, file_handle, self.exception_callback)
# confirm a config with a non existing class causes an exception
bad_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.glider',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'BadDataParticle'
}
with self.assertRaises(AttributeError):
GliderParser(bad_config, file_handle, self.exception_callback)
def test_bad_headers(self):
"""
Test that a file with a short header raises a sample exception
"""
# this file does not have enough header lines
file_handle = open(os.path.join(RESOURCE_PATH, 'short_header.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
parser.get_records(1)
# this file specifies a number of header lines other than 14
file_handle = open(os.path.join(RESOURCE_PATH, 'bad_num_header_lines.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
parser.get_records(1)
# this file specifies a number of label lines other than 3
file_handle = open(os.path.join(RESOURCE_PATH, 'bad_num_label_lines.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
parser.get_records(1)
def test_missing_time(self):
"""
Test that a file which is missing the required m_present_time field for timestamps raises a sample exception
"""
# this file is missing the m_present_time label
file_handle = open(os.path.join(RESOURCE_PATH, 'no_time_label.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
parser.get_records(1)
def test_short_data(self):
"""
Test that if the number of columns in the header do not match the number of columns in the data an
exception occurs
"""
# this file is has two columns removed from the data libe
file_handle = open(os.path.join(RESOURCE_PATH, 'short_data.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)<|fim▁hole|>
def test_bad_sensors_per_cycle(self):
"""
Test that if the number of sensors per cycle from the header does not match that in the header that an
exception in the callback occurs, but processing continues
"""
with open(os.path.join(RESOURCE_PATH, 'bad_sensors_per_cycle.mrg'), 'rU') as file_handle:
parser = GliderParser(self.config, file_handle, self.exception_callback)
particles = parser.get_records(1)
self.assert_particles(particles, "single.yml", RESOURCE_PATH)
self.assertEqual(len(self.exception_callback_value), 1)
self.assertIsInstance(self.exception_callback_value[0], SampleException)
def test_short_units(self):
"""
Test that if the number of label columns does not match the units number of columns an exception occurs
"""
# this file is has two columns removed from the data libe
file_handle = open(os.path.join(RESOURCE_PATH, 'short_units.mrg'), 'rU')
with self.assertRaises(DatasetParserException):
parser = GliderParser(self.config, file_handle, self.exception_callback)
parser.get_records(1)<|fim▁end|> |
parser.get_records(1) |
<|file_name|>settings.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-settings',
templateUrl: './settings.component.html',
styleUrls: ['./settings.component.scss']
})
export class SettingsComponent implements OnInit {
<|fim▁hole|> ngOnInit() {
}
}<|fim▁end|> | constructor() {
}
|
<|file_name|>pyreringconfig.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Copyright 2008 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module to control the application global properties.
This module will manage a singleton object for the PyreRing global
properties. These properties include: root_dir, testdatabase etc.
These properties are stored as a dictionary which is referred through a global
variable and managed by some module level methods in this module.
"""
__author__ = '[email protected] (Mingyu Wu)'
import getpass
import os
import time
from lib import filesystemhandlerextend
# Runtime configuration keys, user can't overwrite through config file.
NON_OVERWRITTEN_KEYS = ['time', 'tester', 'host_name']
class PyreRingConfig(object):
"""A class to store PyreRing runtime config info in a dict.
This class is used to manage the pyrering related configuration data
and it will have a dictionary to hold them and pushed to global. It should be
maintained as a single instance.
During the whole test run, this is the only one copy of the properties.
It will contain a dictionary with key value pairs from the config file and
some extra items generated automatically, namely:
Automatically set by PyreRing, not user configurable:
root_dir: PyreRing root directory.
PyreRing automatically discovers it.
host_name: The machine name PyreRing is running on.
PyreRing automatically discovers it.
tester: The user account PyreRing is running as.
PyreRing automatically discovers it.
time: The time string identifies the pyrering was started.
PyreRing automatically discovers it.
Managed by config file only, not through command line:
log_level: The logging level as defined in Python logging module.
default value is INFO
skip_setup: If True, PyreRing will skip user setup suite.
default value is False.
header_file: User specified report header file which will be insert into
PyreRing report.
default value is <root_dir>/header_info.txt
FATAL_STRING: a string contains comma separated substrings. If any
substring is found in the test output, the test will fail,
regardless of the return code of the test.
default_suite: The name of default test suite, not currently used.
No default value.
Managed by config file and user can overwrite through command line options:
report_dir: the PyreRing report and log directory.
default value <root_dir>/reports/
conf_file: the name of PyreRing config file with path. If a non_absolute
path provided, the actual value will be os.path.join(ed) with
'<root_dir>/conf'
default name is pyrering.conf
project_name: The name of a project PyreRing will test on.
sendmail: a boolean value if PyreRing should send out email report or not.
default value is False. Note: there will be no email if all test
passed regardless of this flag.
email_recipients: comma separated email addresses as email recipients.
default value is the same as tester.
log_file: the name of the log file. If a non_absulte path provided, the
the actual value will be os.path.join(ed) with
'<root_dir>/report'
default name is pyrering.log
file_errors: a boolean value that turns on filing the output of each none
passing testcase to a separate output file.
reset: a boolean value user sets from the command line. If true, the run
time configuration will replace existing configuration file. It has
no effect in the conf file.
"""
def __init__(self,
filesystem=filesystemhandlerextend.FileSystemHandlerExtend()):
self.settings = {}
self.filesystem = filesystem
def _CreateConfig(self):<|fim▁hole|>
Returns:
None. The constructed info write to conf_file
"""
key_list = sorted(self.settings.keys())
output = ''.join(['%s=%s\n' % (key, self.settings[key])
for key in key_list])
self.filesystem.WriteToFile(self.settings['conf_file'], output)
print """
***********Attention Please***************************
Either no configuration file was found at: %s
Or a reset option was issued.
Creating a default configuration file.
User can edit it later to change default values at: %s.
******************************************************
""" % (self.settings['conf_file'], self.settings['conf_file'])
def _ReadConfig(self):
"""Convert the conf_file to a dictionary.
Returns:
a dictionary with key value pairs from the conf file.
"""
settings = {}
conf_handler = self.filesystem.FileOpenForRead(self.settings['conf_file'])
for line in conf_handler:
line = line.strip()
if (not line) or line.startswith('#') or (not '=' in line):
continue
key, value = line.split('=', 1)
# make it java.util.Properties like property reader.
# so I have to strip the quotes around the values
key = key.strip()
value = value.strip(' \t\r\'"')
# sendmail, reset and skip_setup should be treated as boolean values,
# others are treated as strings.
if key in ['sendmail', 'reset', 'skip_setup']:
settings[key] = (value.lower().startswith('true') or
value.startswith('1'))
else:
settings[key] = value
conf_handler.close()
# Remove the config we don't need. Most likely they will be generated on the
# runtime.
for key in NON_OVERWRITTEN_KEYS:
settings.pop(key, None)
return settings
def _AddDefaultConfig(self, pyrering_root):
"""Populate the settings dictionary with default values.
This method will provide a base configuration dictionary for PyreRing.
Args:
pyrering_root: path refer to the pyrering root dir.
Returns:
None.
"""
self.settings.update({
'root_dir': pyrering_root,
'report_dir': self.filesystem.PathJoin(pyrering_root, 'reports'),
'conf_file': self.filesystem.PathJoin(pyrering_root,
'conf',
'pyrering.conf'),
'host_name': self.filesystem.GetHostName(),
'tester': getpass.getuser(),
'project_name': '<YOUR PROJECT NAME>',
'default_suite': 'default_suite',
'source_dir': '<YOUR TEST SCRIPT TOP DIRECTORY>',
'sendmail': False,
'email_recipients': getpass.getuser(),
'log_file': 'pyrering.log',
'file_errors': False,
'reset': False,
'runner': 'baserunner',
'FATAL_STRING': '',
'header_file': 'header_info.txt',
'skip_setup': False,
'log_level': 'INFO',
# A timestamp string to identify the time pyrering is started.
# The format should be yyymmddHHMM
'time': time.strftime('%Y%m%d%H%M'),
})
def Populate(self, pyrering_root, user_settings):
"""Populate settings dictionary.
If the conf file exist, it will use user settings update conf file
settings and update default settings.
If the conf file doesn't exist, it will user user settings update default
settings and export as conf file.
Args:
pyrering_root: the path of the project root
user_settings: user settings dictionary
Returns:
None. self.settings will have the effective values.
"""
pyrering_root = self.filesystem.FindAbsPath(pyrering_root)
# If config file is not set in the user arguments, use the default one:
# '<pyrering_root>/conf/pyrering.conf' to populate the default
# dictionary. Create the directory if it doesn't exist.
if not user_settings.get('conf_file', None):
conf_path = self.filesystem.PathJoin(pyrering_root, 'conf')
else:
conf_path = os.path.dirname(user_settings.get('conf_file'))
if not self.filesystem.CheckDir(conf_path):
self.filesystem.MkDir(conf_path)
self._AddDefaultConfig(pyrering_root)
self.settings.update(user_settings)
# if the conf_file exists, read it, else populate the conf file and inform
# user to examine.
if (not user_settings.get('reset', False) and
self.filesystem.CheckFile(self.settings['conf_file'])):
# The user_settings coming from the command line will update the
# config file settings.
read_conf_dict = self._ReadConfig()
read_conf_dict.update(user_settings)
self.settings.update(read_conf_dict)
else:
self._CreateConfig()
# If after all this settings, the source_dir is still not set, we will
# temporarily set it as current dir to let user run script from current
# directory.
if self.settings['source_dir'] == '<YOUR TEST SCRIPT TOP DIRECTORY>':
self.settings['source_dir'] = self.filesystem.FindAbsPath('.')
# The GlobalPyreRingConfig should be one and only instance in the PyreRing
# life cycle.
GlobalPyreRingConfig = PyreRingConfig()
def Init(pyrering_root, user_settings):
"""Get settings populated.
This method will check if settings still empty means it is never initialized,
then it calls populate to populate the settings for use.
Args:
pyrering_root: the path of the root dir of pyrering.py file
user_settings: a dictionary populated with settings.
Returns:
None.
"""
if not GlobalPyreRingConfig.settings.keys():
GlobalPyreRingConfig.Populate(pyrering_root, user_settings)
return
def Update(new_settings):
"""Update the settings with new values."""
GlobalPyreRingConfig.settings.update(new_settings)
def Reset():
"""Clean up the contents of settings."""
GlobalPyreRingConfig.settings.clear()<|fim▁end|> | """Create a config file based on user config plus default config.
This method should create a new config file using some runtime information. |
<|file_name|>predict.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate log;
extern crate fern;
extern crate time;
extern crate gpredict;
use gpredict::{Predict, Location, Tle};
use std::thread;
fn conf_logger() {
let logger_config = fern::DispatchConfig {
format: Box::new(|msg: &str, level: &log::LogLevel, _location: &log::LogLocation| {
let t = time::now();
let ms = t.tm_nsec/1000_000;
format!("{}.{:3} [{}] {}", t.strftime("%Y-%m-%dT%H:%M:%S").unwrap(), ms, level, msg)
}),
output: vec![fern::OutputConfig::stderr()],
level: log::LogLevelFilter::Trace,
};
if let Err(e) = fern::init_global_logger(logger_config, log::LogLevelFilter::Trace) {
panic!("Failed to initialize global logger: {}", e);
}
}
fn main() {
// setup fern logger
conf_logger();
// start processing
info!("predict example started");
let tle: Tle = Tle {
name: "GRIFEX".to_string(),
line1: "1 40379U 15003D 15243.42702278 .00003367 00000-0 17130-3 0 9993".to_string(),
line2: "2 40379 99.1124 290.6779 0157088 8.9691 351.4280 15.07659299 31889".to_string()
};
let location: Location = Location{lat_deg:58.64560, lon_deg: 23.15163, alt_m: 8.};
let mut predict: Predict = Predict::new(&tle, &location);
loop {
// these two are the same:
//predict.update(Some(time::now_utc()));<|fim▁hole|>
info!("aos : {:}", predict.sat.aos.expect("do not have AOS with this satellite").to_utc().rfc3339());
info!("los : {:}", predict.sat.los.expect("do not have LOS with this satellite").to_utc().rfc3339());
info!("az : {:.2}°", predict.sat.az_deg);
info!("el : {:.2}°", predict.sat.el_deg);
info!("range : {:.0} km", predict.sat.range_km);
info!("range rate : {:.3} km/sec\n", predict.sat.range_rate_km_sec);
thread::sleep_ms(1000);
}
}<|fim▁end|> | predict.update(None); |
<|file_name|>datastore.rs<|end_file_name|><|fim▁begin|>/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This software may be used and distributed according to the terms of the
* GNU General Public License version 2.
*/
use std::io::Cursor;
use std::io::Write;
use anyhow::format_err;
use anyhow::Result;
use byteorder::BigEndian;
use byteorder::ReadBytesExt;
use byteorder::WriteBytesExt;
#[cfg(any(test, feature = "for-tests"))]
use quickcheck_arbitrary_derive::Arbitrary;
use serde_derive::Deserialize;
use serde_derive::Serialize;
#[derive(Clone, Copy, Debug, Default, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(test, feature = "for-tests"), derive(Arbitrary))]
pub struct Metadata {
pub size: Option<u64>,
pub flags: Option<u64>,
}
impl Metadata {
pub const LFS_FLAG: u64 = 0x2000;
/// Returns true if the blob retrieved from `DataStore::get` is an LFS pointer.
pub fn is_lfs(&self) -> bool {
match self.flags {
None => false,
Some(flag) => (flag & Metadata::LFS_FLAG) == Metadata::LFS_FLAG,
}
}
pub fn write<T: Write>(&self, writer: &mut T) -> Result<()> {
let mut buf = vec![];
if let Some(flags) = self.flags {
if flags != 0 {
Metadata::write_meta(b'f', flags, &mut buf)?;
}
}
if let Some(size) = self.size {
Metadata::write_meta(b's', size, &mut buf)?;
}
writer.write_u32::<BigEndian>(buf.len() as u32)?;
writer.write_all(buf.as_ref())?;
Ok(())
}
fn write_meta<T: Write>(flag: u8, value: u64, writer: &mut T) -> Result<()> {
writer.write_u8(flag as u8)?;
writer.write_u16::<BigEndian>(u64_to_bin_len(value))?;
u64_to_bin(value, writer)?;
Ok(())
}<|fim▁hole|> let mut size: Option<u64> = None;
let mut flags: Option<u64> = None;
let start_offset = cur.position();
while cur.position() < start_offset + metadata_len {
let key = cur.read_u8()?;
let value_len = cur.read_u16::<BigEndian>()? as usize;
match key {
b'f' => {
let buf = cur.get_ref();
flags = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
b's' => {
let buf = cur.get_ref();
size = Some(bin_to_u64(
&buf[cur.position() as usize..cur.position() as usize + value_len],
));
}
_ => return Err(format_err!("invalid metadata format '{:?}'", key)),
}
let cur_pos = cur.position();
cur.set_position(cur_pos + value_len as u64);
}
Ok(Metadata { flags, size })
}
}
/// Precompute the size of a u64 when it is serialized
fn u64_to_bin_len(value: u64) -> u16 {
let mut value = value;
let mut count = 0;
while value > 0 {
count += 1;
value >>= 8;
}
count
}
/// Converts an integer into a buffer using a special format used in the datapack format.
fn u64_to_bin<T: Write>(value: u64, writer: &mut T) -> Result<()> {
let mut value = value;
let mut buf = [0; 8];
let len = u64_to_bin_len(value) as usize;
let mut pos = len;
while value > 0 {
pos -= 1;
buf[pos] = value as u8;
value >>= 8;
}
assert!(value == 0 && pos == 0);
writer.write_all(&buf[0..len])?;
Ok(())
}
/// Converts a buffer to an integer using a special format used in the datapack format.
fn bin_to_u64(buf: &[u8]) -> u64 {
let mut n: u64 = 0;
for byte in buf.iter() {
n <<= 8;
n |= *byte as u64;
}
n
}
#[cfg(test)]
mod tests {
use quickcheck::quickcheck;
use super::*;
quickcheck! {
fn test_roundtrip_bin_to_u64(value: u64) -> bool {
let mut buf: Vec<u8> = vec![];
u64_to_bin(value, &mut buf).unwrap();
if buf.len() != u64_to_bin_len(value) as usize {
return false;
}
let new_value = bin_to_u64(&buf);
value == new_value
}
fn test_roundtrip_metadata(size: Option<u64>, flags: Option<u64>) -> bool {
let meta = Metadata { size, flags };
let mut buf: Vec<u8> = vec![];
meta.write(&mut buf).expect("write");
let read_meta = Metadata::read(&mut Cursor::new(&buf)).expect("read");
meta.size == read_meta.size && (meta.flags == read_meta.flags || meta.flags.map_or(false, |v| v == 0))
}
}
}<|fim▁end|> |
pub fn read(cur: &mut Cursor<&[u8]>) -> Result<Metadata> {
let metadata_len = cur.read_u32::<BigEndian>()? as u64; |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod scene_renderable;
pub use self::scene_renderable::{
MeshRenderable,<|fim▁hole|>
mod scene_renderer;
pub use self::scene_renderer::{SceneRenderer};
mod primitives;
pub use self::primitives::{
icosahedron_renderable,
unit_sphere_renderable,
box_renderable
};
mod window;
pub use self::window::{Window, Frame};
mod color;
pub use self::color::{Color};<|fim▁end|> | RenderData,
SceneRenderable,
SceneRenderableStore
}; |
<|file_name|>reward.py<|end_file_name|><|fim▁begin|>"""
Computes and stores a lookup table for a given environment and reward function.
A list of reward functions will be added here and refered to by the keyword "rType".
"""
class Reward:
def __init__(self,environment,rType):<|fim▁hole|><|fim▁end|> |
def exampleReward(self,environment):
return |
<|file_name|>decompile.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import sys
from builtins import input
from builtins import map
# This file is part of Androguard.
#
# Copyright (c) 2012 Geoffroy Gueguen <[email protected]>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import next
from builtins import object
from builtins import range
from builtins import str
sys.path.append('./')
import logging
import struct
from collections import defaultdict
import androguard.core.androconf as androconf
import androguard.decompiler.dad.util as util
from androguard.core.analysis import analysis
from androguard.core.bytecodes import apk, dvm
from androguard.decompiler.dad.ast import (
JSONWriter, parse_descriptor, literal_string, literal_hex_int,
dummy)
from androguard.decompiler.dad.control_flow import identify_structures
from androguard.decompiler.dad.dataflow import (
build_def_use, place_declarations, dead_code_elimination,
register_propagation, split_variables)
from androguard.decompiler.dad.graph import construct, simplify, split_if_nodes
from androguard.decompiler.dad.instruction import Param, ThisParam
from androguard.decompiler.dad.writer import Writer
from androguard.util import read
def auto_vm(filename):
ret = androconf.is_android(filename)
if ret == 'APK':
return dvm.DalvikVMFormat(apk.APK(filename).get_dex())
elif ret == 'DEX':
return dvm.DalvikVMFormat(read(filename))
elif ret == 'DEY':
return dvm.DalvikOdexVMFormat(read(filename))
return None
# No seperate DvField class currently
def get_field_ast(field):
triple = field.get_class_name()[1:-1], field.get_name(
), field.get_descriptor()
expr = None
if field.init_value:
val = field.init_value.value
expr = dummy(str(val))
if val is not None:
if field.get_descriptor() == 'Ljava/lang/String;':
expr = literal_string(val)
elif field.proto == 'B':
expr = literal_hex_int(struct.unpack('<b', struct.pack("B", val))[0])
return {
'triple': triple,
'type': parse_descriptor(field.get_descriptor()),
'flags': util.get_access_field(field.get_access_flags()),
'expr': expr,
}
class DvMethod(object):
def __init__(self, methanalysis):
method = methanalysis.get_method()
self.method = method
self.start_block = next(methanalysis.get_basic_blocks().get(), None)
self.cls_name = method.get_class_name()
self.name = method.get_name()
self.lparams = []
self.var_to_name = defaultdict()
self.writer = None
self.graph = None
self.ast = None
<|fim▁hole|> self.params_type = util.get_params_type(desc)
self.triple = method.get_triple()
self.exceptions = methanalysis.exceptions.exceptions
code = method.get_code()
if code is None:
logger.debug('No code : %s %s', self.name, self.cls_name)
else:
start = code.registers_size - code.ins_size
if 'static' not in self.access:
self.var_to_name[start] = ThisParam(start, self.cls_name)
self.lparams.append(start)
start += 1
num_param = 0
for ptype in self.params_type:
param = start + num_param
self.lparams.append(param)
self.var_to_name[param] = Param(param, ptype)
num_param += util.get_type_size(ptype)
if not __debug__:
from androguard.core import bytecode
bytecode.method2png('/tmp/dad/graphs/%s#%s.png' % \
(self.cls_name.split('/')[-1][:-1], self.name), methanalysis)
def process(self, doAST=False):
logger.debug('METHOD : %s', self.name)
# Native methods... no blocks.
if self.start_block is None:
logger.debug('Native Method.')
if doAST:
self.ast = JSONWriter(None, self).get_ast()
else:
self.writer = Writer(None, self)
self.writer.write_method()
return
graph = construct(self.start_block, self.var_to_name, self.exceptions)
self.graph = graph
if not __debug__:
util.create_png(self.cls_name, self.name, graph, '/tmp/dad/blocks')
use_defs, def_uses = build_def_use(graph, self.lparams)
split_variables(graph, self.var_to_name, def_uses, use_defs)
dead_code_elimination(graph, def_uses, use_defs)
register_propagation(graph, def_uses, use_defs)
# FIXME var_to_name need to contain the created tmp variables.
# This seems to be a workaround, we add them into the list manually
for var, i in def_uses:
if not isinstance(var, int):
self.var_to_name[var] = var.upper()
place_declarations(graph, self.var_to_name, def_uses, use_defs)
del def_uses, use_defs
# After the DCE pass, some nodes may be empty, so we can simplify the
# graph to delete these nodes.
# We start by restructuring the graph by spliting the conditional nodes
# into a pre-header and a header part.
split_if_nodes(graph)
# We then simplify the graph by merging multiple statement nodes into
# a single statement node when possible. This also delete empty nodes.
simplify(graph)
graph.compute_rpo()
if not __debug__:
util.create_png(self.cls_name, self.name, graph,
'/tmp/dad/pre-structured')
identify_structures(graph, graph.immediate_dominators())
if not __debug__:
util.create_png(self.cls_name, self.name, graph,
'/tmp/dad/structured')
if doAST:
self.ast = JSONWriter(graph, self).get_ast()
else:
self.writer = Writer(graph, self)
self.writer.write_method()
def get_ast(self):
return self.ast
def show_source(self):
print(self.get_source())
def get_source(self):
if self.writer:
return str(self.writer)
return ''
def get_source_ext(self):
if self.writer:
return self.writer.str_ext()
return []
def __repr__(self):
# return 'Method %s' % self.name
return 'class DvMethod(object): %s' % self.name
class DvClass(object):
def __init__(self, dvclass, vma):
name = dvclass.get_name()
if name.find('/') > 0:
pckg, name = name.rsplit('/', 1)
else:
pckg, name = '', name
self.package = pckg[1:].replace('/', '.')
self.name = name[:-1]
self.vma = vma
self.methods = dvclass.get_methods()
self.fields = dvclass.get_fields()
self.code = []
self.inner = False
access = dvclass.get_access_flags()
# If interface we remove the class and abstract keywords
if 0x200 & access:
prototype = '%s %s'
if access & 0x400:
access -= 0x400
else:
prototype = '%s class %s'
self.access = util.get_access_class(access)
self.prototype = prototype % (' '.join(self.access), self.name)
self.interfaces = dvclass.get_interfaces()
self.superclass = dvclass.get_superclassname()
self.thisclass = dvclass.get_name()
logger.info('Class : %s', self.name)
logger.info('Methods added :')
for meth in self.methods:
logger.info('%s (%s, %s)', meth.get_method_idx(), self.name,
meth.name)
logger.info('')
def get_methods(self):
return self.methods
def process_method(self, num, doAST=False):
method = self.methods[num]
if not isinstance(method, DvMethod):
self.methods[num] = DvMethod(self.vma.get_method(method))
self.methods[num].process(doAST=doAST)
else:
method.process(doAST=doAST)
def process(self, doAST=False):
for i in range(len(self.methods)):
try:
self.process_method(i, doAST=doAST)
except Exception as e:
logger.warning('Error decompiling method %s: %s', self.methods[i], e)
def get_ast(self):
fields = [get_field_ast(f) for f in self.fields]
methods = []
for m in self.methods:
if isinstance(m, DvMethod) and m.ast:
methods.append(m.get_ast())
isInterface = 'interface' in self.access
return {
'rawname': self.thisclass[1:-1],
'name': parse_descriptor(self.thisclass),
'super': parse_descriptor(self.superclass),
'flags': self.access,
'isInterface': isInterface,
'interfaces': list(map(parse_descriptor, self.interfaces)),
'fields': fields,
'methods': methods,
}
def get_source(self):
source = []
if not self.inner and self.package:
source.append('package %s;\n' % self.package)
superclass, prototype = self.superclass, self.prototype
if superclass is not None and superclass != 'Ljava/lang/Object;':
superclass = superclass[1:-1].replace('/', '.')
prototype += ' extends %s' % superclass
if len(self.interfaces) > 0:
prototype += ' implements %s' % ', '.join(
[n[1:-1].replace('/', '.') for n in self.interfaces])
source.append('%s {\n' % prototype)
for field in self.fields:
name = field.get_name()
access = util.get_access_field(field.get_access_flags())
f_type = util.get_type(field.get_descriptor())
source.append(' ')
if access:
source.append(' '.join(access))
source.append(' ')
init_value = field.get_init_value()
if init_value:
value = init_value.value
if f_type == 'String':
if value:
value = '"%s"' % value.encode("unicode-escape").decode("ascii")
else:
# FIXME we can not check if this value here is null or ""
# In both cases we end up here...
value = '""'
elif field.proto == 'B':
# byte value: convert from unsiged int to signed and print as hex
# as bytes are signed in Java
value = hex(struct.unpack("b", struct.pack("B", value))[0])
source.append('%s %s = %s;\n' % (f_type, name, value))
else:
source.append('%s %s;\n' % (f_type, name))
for method in self.methods:
if isinstance(method, DvMethod):
source.append(method.get_source())
source.append('}\n')
return ''.join(source)
def get_source_ext(self):
source = []
if not self.inner and self.package:
source.append(
('PACKAGE', [('PACKAGE_START', 'package '), (
'NAME_PACKAGE', '%s' % self.package), ('PACKAGE_END', ';\n')
]))
list_proto = [('PROTOTYPE_ACCESS', '%s class ' % ' '.join(self.access)),
('NAME_PROTOTYPE', '%s' % self.name, self.package)]
superclass = self.superclass
if superclass is not None and superclass != 'Ljava/lang/Object;':
superclass = superclass[1:-1].replace('/', '.')
list_proto.append(('EXTEND', ' extends '))
list_proto.append(('NAME_SUPERCLASS', '%s' % superclass))
if len(self.interfaces) > 0:
list_proto.append(('IMPLEMENTS', ' implements '))
for i, interface in enumerate(self.interfaces):
if i != 0:
list_proto.append(('COMMA', ', '))
list_proto.append(
('NAME_INTERFACE', interface[1:-1].replace('/', '.')))
list_proto.append(('PROTOTYPE_END', ' {\n'))
source.append(("PROTOTYPE", list_proto))
for field in self.fields:
field_access_flags = field.get_access_flags()
access = [util.ACCESS_FLAGS_FIELDS[flag]
for flag in util.ACCESS_FLAGS_FIELDS
if flag & field_access_flags]
f_type = util.get_type(field.get_descriptor())
name = field.get_name()
if access:
access_str = ' %s ' % ' '.join(access)
else:
access_str = ' '
value = None
init_value = field.get_init_value()
if init_value:
value = init_value.value
if f_type == 'String':
if value:
value = ' = "%s"' % value.encode("unicode-escape").decode("ascii")
else:
# FIXME we can not check if this value here is null or ""
# In both cases we end up here...
value = ' = ""'
elif field.proto == 'B':
# a byte
value = ' = %s' % hex(struct.unpack("b", struct.pack("B", value))[0])
else:
value = ' = %s' % str(value)
if value:
source.append(
('FIELD', [('FIELD_ACCESS', access_str), (
'FIELD_TYPE', '%s' % f_type), ('SPACE', ' '), (
'NAME_FIELD', '%s' % name, f_type, field), ('FIELD_VALUE', value), ('FIELD_END',
';\n')]))
else:
source.append(
('FIELD', [('FIELD_ACCESS', access_str), (
'FIELD_TYPE', '%s' % f_type), ('SPACE', ' '), (
'NAME_FIELD', '%s' % name, f_type, field), ('FIELD_END',
';\n')]))
for method in self.methods:
if isinstance(method, DvMethod):
source.append(("METHOD", method.get_source_ext()))
source.append(("CLASS_END", [('CLASS_END', '}\n')]))
return source
def show_source(self):
print(self.get_source())
def __repr__(self):
return 'Class(%s)' % self.name
class DvMachine(object):
def __init__(self, name):
vm = auto_vm(name)
if vm is None:
raise ValueError('Format not recognised: %s' % name)
self.vma = analysis.Analysis(vm)
self.classes = dict((dvclass.get_name(), dvclass)
for dvclass in vm.get_classes())
# util.merge_inner(self.classes)
def get_classes(self):
return list(self.classes.keys())
def get_class(self, class_name):
for name, klass in self.classes.items():
if class_name in name:
if isinstance(klass, DvClass):
return klass
dvclass = self.classes[name] = DvClass(klass, self.vma)
return dvclass
def process(self):
for name, klass in self.classes.items():
logger.info('Processing class: %s', name)
if isinstance(klass, DvClass):
klass.process()
else:
dvclass = self.classes[name] = DvClass(klass, self.vma)
dvclass.process()
def show_source(self):
for klass in self.classes.values():
klass.show_source()
def process_and_show(self):
for name, klass in sorted(self.classes.items()):
logger.info('Processing class: %s', name)
if not isinstance(klass, DvClass):
klass = DvClass(klass, self.vma)
klass.process()
klass.show_source()
logger = logging.getLogger('dad')
sys.setrecursionlimit(5000)
def main():
# logger.setLevel(logging.DEBUG) for debugging output
# comment the line to disable the logging.
logger.setLevel(logging.INFO)
console_hdlr = logging.StreamHandler(sys.stdout)
console_hdlr.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
logger.addHandler(console_hdlr)
default_file = 'examples/android/TestsAndroguard/bin/TestActivity.apk'
if len(sys.argv) > 1:
machine = DvMachine(sys.argv[1])
else:
machine = DvMachine(default_file)
logger.info('========================')
logger.info('Classes:')
for class_name in sorted(machine.get_classes()):
logger.info(' %s', class_name)
logger.info('========================')
cls_name = input('Choose a class: ')
if cls_name == '*':
machine.process_and_show()
else:
cls = machine.get_class(cls_name)
if cls is None:
logger.error('%s not found.', cls_name)
else:
logger.info('======================')
for i, method in enumerate(cls.get_methods()):
logger.info('%d: %s', i, method.name)
logger.info('======================')
meth = input('Method: ')
if meth == '*':
logger.info('CLASS = %s', cls)
cls.process()
else:
cls.process_method(int(meth))
logger.info('Source:')
logger.info('===========================')
cls.show_source()
if __name__ == '__main__':
main()<|fim▁end|> | self.access = util.get_access_method(method.get_access_flags())
desc = method.get_descriptor()
self.type = desc.split(')')[-1] |
<|file_name|>get_lines.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 - Oscar Campos <[email protected]>
# This program is Free Software see LICENSE file for details
import sublime
import sublime_plugin
from ..anaconda_lib.helpers import get_settings
from ..anaconda_lib.helpers import valid_languages
from ..anaconda_lib.linting.sublime import ANACONDA
class AnacondaGetLines(sublime_plugin.WindowCommand):
"""Get a quickpanel with all the errors and lines ready to jump to them
"""
def run(self):
errors = {}
self._harvest_errors(errors, 'ERRORS')
self._harvest_errors(errors, 'WARNINGS')
self._harvest_errors(errors, 'VIOLATIONS')
if len(errors) > 0:
self.options = []
for line, error_strings in errors.items():
for msg in error_strings:
self.options.append([msg, 'line: {}'.format(line)])
self.window.show_quick_panel(self.options, self._jump)
def is_enabled(self):
"""Determines if the command is enabled
"""
view = self.window.active_view()
if (view.file_name() in ANACONDA['DISABLED']
or not get_settings(view, 'anaconda_linting')):
return False
location = view.sel()[0].begin()
for lang in valid_languages():
matcher = 'source.{}'.format(lang)
if view.match_selector(location, matcher) is True:
return True
return False
def _harvest_errors(self, harvester, error_type):
vid = self.window.active_view().id()
for line, error_strings in ANACONDA[error_type].get(vid, {}).items():
if line not in harvester:
harvester[line] = []
for error in error_strings:<|fim▁hole|>
def _jump(self, item):
"""Jump to a line in the view buffer
"""
if item == -1:
return
lineno = int(self.options[item][1].split(':')[1].strip())
pt = self.window.active_view().text_point(lineno, 0)
self.window.active_view().sel().clear()
self.window.active_view().sel().add(sublime.Region(pt))
self.window.active_view().show(pt)<|fim▁end|> | harvester[line].append(error) |
<|file_name|>test.py<|end_file_name|><|fim▁begin|>from webtest import TestApp
import helloworld
import os
def test_index():<|fim▁hole|> assert 'Hello world!' in str(response)<|fim▁end|> | test_index.testbed.init_taskqueue_stub(task_retry_seconds=42, root_path=os.path.dirname(__file__))
app = TestApp(helloworld.app)
# fires off a task queue and should pass without exceptions
response = app.get('/') |
<|file_name|>geometry.test.js<|end_file_name|><|fim▁begin|>"use strict";
var mapnik = require('../');
var assert = require('assert');
var path = require('path');
mapnik.register_datasource(path.join(mapnik.settings.paths.input_plugins,'geojson.input'));
describe('mapnik.Geometry ', function() {
it('should throw with invalid usage', function() {
// geometry cannot be created directly for now
assert.throws(function() { mapnik.Geometry(); });
});
it('should access a geometry from a feature', function() {
var feature = new mapnik.Feature(1);
var point = {
"type": "MultiPoint",
"coordinates": [[0,0],[1,1]]
};
var input = {
type: "Feature",
properties: {},
geometry: point
};
var f = new mapnik.Feature.fromJSON(JSON.stringify(input));
var geom = f.geometry();
assert.equal(geom.type(),mapnik.Geometry.MultiPoint);
assert.deepEqual(JSON.parse(geom.toJSONSync()),point);
var expected_wkb = new Buffer('0104000000020000000101000000000000000000000000000000000000000101000000000000000000f03f000000000000f03f', 'hex');
assert.deepEqual(geom.toWKB(),expected_wkb);
});
it('should fail on toJSON due to bad parameters', function() {
var feature = new mapnik.Feature(1);
var point = {
"type": "MultiPoint",
"coordinates": [[0,0],[1,1]]
};
var input = {
type: "Feature",
properties: {},
geometry: point
};
var f = new mapnik.Feature.fromJSON(JSON.stringify(input));
var geom = f.geometry();
assert.equal(geom.type(),mapnik.Geometry.MultiPoint);
assert.throws(function() { geom.toJSONSync(null); });
assert.throws(function() { geom.toJSONSync({transform:null}); });
assert.throws(function() { geom.toJSONSync({transform:{}}); });
assert.throws(function() { geom.toJSON(null, function(err,json) {}); });
assert.throws(function() { geom.toJSON({transform:null}, function(err, json) {}); });
assert.throws(function() { geom.toJSON({transform:{}}, function(err, json) {}); });
});
it('should throw if we attempt to create a Feature from a geojson geometry (rather than geojson feature)', function() {
var geometry = {
type: 'Point',
coordinates: [ 7.415119300000001, 43.730364300000005 ]
};
// starts throwing, as expected, at Mapnik v3.0.9 (https://github.com/mapnik/node-mapnik/issues/560)<|fim▁hole|> });
}
});
it('should throw from empty geometry from toWKB', function() {
var s = new mapnik.Feature(1);
assert.throws(function() {
var geom = s.geometry().toWKB();
});
});
});<|fim▁end|> | if (mapnik.versions.mapnik_number >= 300009) {
assert.throws(function() {
var transformed = mapnik.Feature.fromJSON(JSON.stringify(geometry)); |
<|file_name|>prc_aproximacao.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
--------------------------------------------------------------------------------------------------
prc_aproximacao
procedimento de aproximação de acordo com o aeródromo e pista estabelecidos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
revision 0.2 2016/oct mlabru
pep8 style conventions
revision 0.1 2015/nov mlabru
initial version (Linux/Python)
--------------------------------------------------------------------------------------------------
"""
__version__ = "$revision: 0.2$"
__author__ = "Milton Abrunhosa"
__date__ = "2016/10"
# < imports >--------------------------------------------------------------------------------------
# python library
import logging
# control
# import control.control_debug as dbg
# model
import model.newton.defs_newton as ldefs
import model.newton.cine.abort_prc as abnd
import model.newton.cine.obtem_brk as obrk
import model.newton.cine.prc_dir_ponto as dp
import model.newton.cine.trata_associado as tass
import model.newton.cine.sentido_curva as scrv
# -------------------------------------------------------------------------------------------------
def __obtem_apx_per(f_atv, f_apx):
"""
obtém a aproximação perdida
@param f_atv: pointer to aeronave
@param f_apx: pointer to aproximação
@return True se encontrou a aproximação perdida, senão False (inexistente)
"""
# check input
assert f_atv
assert f_apx
# aproximação perdida ok ?
if (f_apx.ptr_apx_prc_ape is not None) and (f_apx.ptr_apx_prc_ape.v_prc_ok):
# inicia campo procedimento da aeronave com posição da ApxPerdida
f_atv.ptr_trf_prc = f_apx.ptr_apx_prc_ape
# aeródromo e pista estabelecidos existem. retorna sucesso na pesquisa
return True
# retorna condição de falha na pesquisa
return False
# ------------------------------------------------------------------------------------------------
def __obtem_ils(f_atv, f_apx):
"""
o procedimento ILS
@param f_atv: pointer to aeronave
@param f_apx: pointer to aproximação
@return True se encontrou o ILS, senão False (inexistente)
"""
# check input
assert f_atv
assert f_apx
# ILS ok ?
if (f_apx.ptr_apx_prc_ils is not None) and (f_apx.ptr_apx_prc_ils.v_prc_ok):
# inicia campo procedimento da aeronave com posição do ILS
f_atv.ptr_trf_prc = f_apx.ptr_apx_prc_ils
# aeródromo e a pista estabelecidos existem. retorna sucesso na pesquisa
return True
# retorna condição de falha na pesquisa
return False
# ------------------------------------------------------------------------------------------------
def __obtem_pouso(f_atv, f_apx):
"""
obtém o Pouso
@param f_atv: pointer to aeronave
@param f_apx: pointer to aproximação
@return True se encontrou o Pouso, senão False (inexistente)
"""
# check input
assert f_atv
assert f_apx
# pista de pouso ok ?
if (f_apx.ptr_apx_pis is not None) and (f_apx.ptr_apx_pis.v_pst_ok):
# ângulo mínimo para o pouso
# i_pst_rumo (mlabru)
lf_ang = abs(f_atv.f_trf_pro_atu - f_apx.ptr_apx_pis.f_pst_true)
# tem condições de fazer pouso direto ?
if lf_ang <= 15.:
# inicia a nova fase na aproximação
f_atv.en_atv_fase = ldefs.E_FASE_APXALINHAR
# estabelece a proa a ser atingida (rumo da pista)
# i_pst_rumo (mlabru)
f_atv.f_atv_pro_dem = f_apx.ptr_apx_pis.f_pst_true
# inicia a curva pelo menor lado
scrv.sentido_curva(f_atv)
# pointer do aeródromo
f_atv.ptr_atv_aer = f_apx.ptr_apx_aer
# pointer da pista
f_atv.ptr_atv_pst = f_apx.ptr_apx_pis
# coloca em procedimento de pouso
f_atv.en_trf_fnc_ope = ldefs.E_POUSO
# volta para fase inicial do procedimento de aproximação OU fase inicial do pouso
f_atv.en_atv_fase = ldefs.E_FASE_ZERO
# retorna sucesso na pesquisa
return True
# retorna condição de falha na pesquisa
return False
# -------------------------------------------------------------------------------------------------
def prc_aproximacao(f_atv, f_cine_data, f_stk_context):
"""
realiza o procedimento de aproximação
@param f_atv: pointer to aeronave
@param f_cine_data: dados da cinemática
@param f_stk_context: pointer to stack
"""
# check input
assert f_atv
# active flight ?
if (not f_atv.v_atv_ok) or (ldefs.E_ATIVA != f_atv.en_trf_est_atv):
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E01: aeronave não ativa.")
# abort procedure
abnd.abort_prc(f_atv)
# cai fora...
return
# performance ok ?
if (f_atv.ptr_trf_prf is None) or (not f_atv.ptr_trf_prf.v_prf_ok):
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E02: performance não existe.")
# abort procedure
abnd.abort_prc(f_atv)
# cai fora...
return
# pointer to aproximação
l_apx = f_atv.ptr_trf_prc
# aproximação ok ?
if (l_apx is None) or (not l_apx.v_prc_ok):
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E03: aproximação inexistente. aeronave:[{}/{}].".format(f_atv.i_trf_id, f_atv.s_trf_ind))
# abort procedure
abnd.abort_prc(f_atv)
# return
return
# variáveis locais
l_brk = None
# fase de preparação dos dados para o procedimento ?
if ldefs.E_FASE_ZERO == f_atv.en_atv_fase:
# inicia o index de breakpoints
f_cine_data.i_brk_ndx = 0
# inicia com dados do primeiro breakpoint<|fim▁hole|> # breakpoint ok ?
if (l_brk is None) or (not l_brk.v_brk_ok):
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E04: fase zero. apx/breakpoint inexistente. aeronave:[{}/{}].".format(f_atv.i_trf_id, f_atv.s_trf_ind))
# abort procedure
abnd.abort_prc(f_atv)
# return
return
# obtém dados do breakpoint
obrk.obtem_brk(f_atv, l_brk, f_cine_data)
# fase de direcionamento aos breakpoints do procedimento ?
elif ldefs.E_FASE_DIRPONTO == f_atv.en_atv_fase:
# interceptou o breakpoint ?
if dp.prc_dir_ponto(f_atv, f_cine_data.f_coord_x_brk, f_cine_data.f_coord_y_brk, f_cine_data):
# se não houver um procedimento associado, faz uma espera, senão executa o procedimento
f_atv.en_atv_fase = ldefs.E_FASE_ESPERA if f_atv.ptr_atv_brk is not None else ldefs.E_FASE_ASSOCIADO
# fase rumo e altitude ?
elif ldefs.E_FASE_RUMOALT == f_atv.en_atv_fase:
# atingiu a proa e a altitude de demanda estabelecidas ?
if (f_atv.f_trf_pro_atu == f_atv.f_atv_pro_dem) and (f_atv.f_trf_alt_atu == f_atv.f_atv_alt_dem):
# se não houver um procedimento associado, faz uma espera, senão executa o procedimento
f_atv.en_atv_fase = ldefs.E_FASE_ESPERA if f_atv.ptr_atv_brk is not None else ldefs.E_FASE_ASSOCIADO
# fase de espera ? (mantém a aeronave em orbita até alcançar a altitude do breakpoint)
elif ldefs.E_FASE_ESPERA == f_atv.en_atv_fase:
# dados do breakpoint
l_brk = f_atv.ptr_atv_brk
assert l_brk
# NÃO atingiu a altitude do breakpoint ?
if f_atv.f_trf_alt_atu != l_brk.f_brk_alt:
# obtém dados do breakpoint (Espera com altitude de demanda)
obrk.obtem_brk(f_atv, l_brk, f_cine_data)
# empilha o contexto atual devido a mudança na função operacional
f_stk_context.append((f_atv.en_trf_fnc_ope, ldefs.E_FASE_ASSOCIADO, f_atv.ptr_trf_prc, f_atv.ptr_atv_brk, f_cine_data.i_brk_ndx))
# salva a função operacional atual
f_atv.en_trf_fnc_ope_ant = ldefs.E_APROXIMACAO
# estabelece a nova função operacional e a nova fase por não ter atingido a altitude do breakpoint
f_atv.en_trf_fnc_ope = ldefs.E_ESPERA
f_atv.en_atv_fase = ldefs.E_FASE_ZERO
f_atv.ptr_trf_prc = l_apx.ptr_apx_prc_esp
# otherwise, atingiu a altitude do breakpoint...
else:
# estabelece nova velocidade de demanda e sinaliza nova fase
f_atv.f_atv_vel_dem = f_atv.ptr_trf_prf.f_prf_vel_apx
f_atv.en_atv_fase = ldefs.E_FASE_ASSOCIADO
# fase associado ? (identifica se houve encadeamento de outros procedimentos)
elif ldefs.E_FASE_ASSOCIADO == f_atv.en_atv_fase:
# dados do breakpoint
l_brk = f_atv.ptr_atv_brk
assert l_brk
# sinaliza nova fase
f_atv.en_atv_fase = ldefs.E_FASE_BREAKPOINT
# existe procedimento associado (APX, APE, TRJ, ESP...) ao breakpoint ?
if tass.trata_associado(f_atv, l_brk, f_cine_data.i_brk_ndx, f_stk_context):
# é o último breakpoint da aproximação atual ?
if f_atv.ptr_atv_brk == l_apx.lst_apx_brk[-1]:
f_cine_data.i_brk_ndx -= 1
# já passou por todos os breakpoints ?
elif ldefs.E_FASE_BREAKPOINT == f_atv.en_atv_fase:
# é o último breakpoint da aproximação atual ?
if f_atv.ptr_atv_brk == l_apx.lst_apx_brk[-1]:
# possível ILS ?
if l_apx.ptr_apx_prc_ils is not None:
# ils ok ?
if __obtem_ils(f_atv, l_apx):
# coloca em procedimento de ILS
f_atv.en_trf_fnc_ope = ldefs.E_ILS
f_atv.en_atv_fase = ldefs.E_FASE_ZERO
# otherwise, ils not ok...
else:
# coloca em manual
f_atv.en_trf_fnc_ope = ldefs.E_MANUAL
# pode fazer aproximação perdida caso não esteja em condições para aproximação ?
if l_apx.ptr_apx_prc_ape is not None:
# dados do breakpoint
l_brk = f_atv.ptr_atv_brk
assert l_brk
# está em condição de pouso ?
if (abs(f_atv.f_trf_alt_atu - l_brk.f_brk_alt) <= 0.01) and (abs(f_atv.f_trf_vel_atu - f_atv.ptr_trf_prf.f_prf_vel_apx) <= 0.01):
# pouso ok ?
if not __obtem_pouso(f_atv, l_apx):
# coloca em manual
f_atv.en_trf_fnc_ope = ldefs.E_MANUAL
# otherwise, NÃO está em condição de pouso...
else:
# aproximação perdida ok ?
if __obtem_apx_per(f_atv, l_apx):
# prepara para procedimento de aproximação perdida
f_atv.en_trf_fnc_ope = ldefs.E_APXPERDIDA
f_atv.en_atv_fase = ldefs.E_FASE_ZERO
# otherwise, aproximação perdida not ok...
else:
# coloca em manual
f_atv.en_trf_fnc_ope = ldefs.E_MANUAL
# otherwise, NÃO pode fazer aproximação perdida nem ILS, faz pouso forçado...
else:
# pouso ok ?
if not __obtem_pouso(f_atv, l_apx):
# coloca em manual
f_atv.en_trf_fnc_ope = ldefs.E_MANUAL
# otherwise, não é o último breakpoint
else:
# próximo breakpoint
f_cine_data.i_brk_ndx += 1
# aponta para o próximo breakpoint
l_brk = f_atv.ptr_atv_brk = l_apx.lst_apx_brk[f_cine_data.i_brk_ndx]
# breakpoint ok ?
if (l_brk is None) or (not l_brk.v_brk_ok):
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E05: fase breakpoint. apx/breakpoint inexistente. aeronave:[{}/{}].".format(f_atv.i_trf_id, f_atv.s_trf_ind))
# abort procedure
abnd.abort_prc(f_atv)
# apx/breakpoint inexistente. cai fora...
return
# obtém dados do breakpoint
obrk.obtem_brk(f_atv, l_brk, f_cine_data)
# otherwise,...
else:
# logger
l_log = logging.getLogger("prc_aproximacao")
l_log.setLevel(logging.ERROR)
l_log.error(u"<E06: fase da aproximação não identificada. fase:[{}].".format(ldefs.DCT_FASE[f_atv.en_atv_fase]))
# < the end >--------------------------------------------------------------------------------------<|fim▁end|> | l_brk = f_atv.ptr_atv_brk = l_apx.lst_apx_brk[0]
|
<|file_name|>hamtask.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from ..provider.g5k import G5K
from constants import SYMLINK_NAME
from functools import wraps
import os
import yaml
import logging
def load_env():
env = {
'config' : {}, # The config
'resultdir': '', # Path to the result directory
'config_file' : '', # The initial config file
'nodes' : {}, # Roles with nodes
'phase' : '', # Last phase that have been run
'user' : '', # User id for this job
'kolla_repo': 'https://git.openstack.org/openstack/kolla',
'kolla_branch': 'stable/newton'
}
# Loads the previously saved environment (if any)
env_path = os.path.join(SYMLINK_NAME, 'env')
if os.path.isfile(env_path):
with open(env_path, 'r') as f:
env.update(yaml.load(f))
logging.debug("Reloaded config %s", env['config'])
# Resets the configuration of the environment
if os.path.isfile(env['config_file']):
with open(env['config_file'], 'r') as f:
env['config'].update(yaml.load(f))
logging.debug("Reloaded config %s", env['config'])
return env
def save_env(env):
env_path = os.path.join(env['resultdir'], 'env')
if os.path.isdir(env['resultdir']):
with open(env_path, 'w') as f:
yaml.dump(env, f)
def hamtask(doc):
"""Decorator for a Ham Task."""
def decorator(fn):
fn.__doc__ = doc
@wraps(fn)
def decorated(*args, **kwargs):
# TODO: Dynamically loads the provider
if kwargs.has_key('--provider'):
provider_name = kwargs['--provider']<|fim▁hole|> env = load_env()
kwargs['env'] = env
# Proceeds with the function executio
fn(*args, **kwargs)
# Save the environment
save_env(env)
return decorated
return decorator<|fim▁end|> | kwargs['provider'] = G5K()
# Loads the environment & set the config |
<|file_name|>kinesis_stream.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: kinesis_stream
short_description: Manage a Kinesis Stream.
description:
- Create or Delete a Kinesis Stream.
- Update the retention period of a Kinesis Stream.
- Update Tags on a Kinesis Stream.
- Enable/disable server side encryption on a Kinesis Stream.
version_added: "2.2"
requirements: [ boto3 ]
author: Allen Sanabria (@linuxdynasty)
options:
name:
description:
- The name of the Kinesis Stream you are managing.
required: true
type: str
shards:
description:
- The number of shards you want to have with this stream.
- This is required when I(state=present)
type: int
retention_period:
description:
- The length of time (in hours) data records are accessible after they are added to
the stream.
- The default retention period is 24 hours and can not be less than 24 hours.
- The maximum retention period is 168 hours.
- The retention period can be modified during any point in time.
type: int
state:
description:
- Create or Delete the Kinesis Stream.
default: present
choices: [ 'present', 'absent' ]
type: str
wait:
description:
- Wait for operation to complete before returning.
default: true
type: bool
wait_timeout:
description:
- How many seconds to wait for an operation to complete before timing out.
default: 300
type: int
tags:
description:
- "A dictionary of resource tags of the form: C({ tag1: value1, tag2: value2 })."
aliases: [ "resource_tags" ]
type: dict
encryption_state:
description:
- Enable or Disable encryption on the Kinesis Stream.
choices: [ 'enabled', 'disabled' ]
version_added: "2.5"
type: str
encryption_type:
description:
- The type of encryption.
- Defaults to C(KMS)
choices: ['KMS', 'NONE']
version_added: "2.5"
type: str
key_id:
description:
- The GUID or alias for the KMS key.
version_added: "2.5"
type: str
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Basic creation example:
- name: Set up Kinesis Stream with 10 shards and wait for the stream to become ACTIVE
kinesis_stream:
name: test-stream
shards: 10
wait: yes
wait_timeout: 600
register: test_stream
# Basic creation example with tags:
- name: Set up Kinesis Stream with 10 shards, tag the environment, and wait for the stream to become ACTIVE
kinesis_stream:
name: test-stream
shards: 10
tags:
Env: development
wait: yes
wait_timeout: 600
register: test_stream
# Basic creation example with tags and increase the retention period from the default 24 hours to 48 hours:
- name: Set up Kinesis Stream with 10 shards, tag the environment, increase the retention period and wait for the stream to become ACTIVE
kinesis_stream:
name: test-stream
retention_period: 48
shards: 10
tags:
Env: development
wait: yes
wait_timeout: 600
register: test_stream
# Basic delete example:
- name: Delete Kinesis Stream test-stream and wait for it to finish deleting.
kinesis_stream:
name: test-stream
state: absent
wait: yes
wait_timeout: 600
register: test_stream
# Basic enable encryption example:
- name: Encrypt Kinesis Stream test-stream.
kinesis_stream:
name: test-stream
state: present
encryption_state: enabled
encryption_type: KMS
key_id: alias/aws/kinesis
wait: yes
wait_timeout: 600
register: test_stream
# Basic disable encryption example:
- name: Encrypt Kinesis Stream test-stream.
kinesis_stream:
name: test-stream
state: present
encryption_state: disabled
encryption_type: KMS
key_id: alias/aws/kinesis
wait: yes
wait_timeout: 600
register: test_stream
'''
RETURN = '''
stream_name:
description: The name of the Kinesis Stream.
returned: when state == present.
type: str
sample: "test-stream"
stream_arn:
description: The amazon resource identifier
returned: when state == present.
type: str
sample: "arn:aws:kinesis:east-side:123456789:stream/test-stream"
stream_status:
description: The current state of the Kinesis Stream.
returned: when state == present.
type: str
sample: "ACTIVE"
retention_period_hours:
description: Number of hours messages will be kept for a Kinesis Stream.
returned: when state == present.
type: int
sample: 24
tags:
description: Dictionary containing all the tags associated with the Kinesis stream.
returned: when state == present.
type: dict
sample: {
"Name": "Splunk",
"Env": "development"
}
'''
import re
import datetime
import time
from functools import reduce
try:
import botocore.exceptions
except ImportError:
pass # Taken care of by ec2.HAS_BOTO3
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO3, boto3_conn, ec2_argument_spec, get_aws_connection_info
from ansible.module_utils._text import to_native
def convert_to_lower(data):
"""Convert all uppercase keys in dict with lowercase_
Args:
data (dict): Dictionary with keys that have upper cases in them
Example.. FooBar == foo_bar
if a val is of type datetime.datetime, it will be converted to
the ISO 8601
Basic Usage:
>>> test = {'FooBar': []}
>>> test = convert_to_lower(test)
{
'foo_bar': []
}
Returns:
Dictionary
"""
results = dict()
if isinstance(data, dict):
for key, val in data.items():
key = re.sub(r'(([A-Z]{1,3}){1})', r'_\1', key).lower()
if key[0] == '_':
key = key[1:]
if isinstance(val, datetime.datetime):
results[key] = val.isoformat()
elif isinstance(val, dict):
results[key] = convert_to_lower(val)
elif isinstance(val, list):
converted = list()
for item in val:
converted.append(convert_to_lower(item))
results[key] = converted
else:
results[key] = val
return results
def make_tags_in_proper_format(tags):
"""Take a dictionary of tags and convert them into the AWS Tags format.
Args:
tags (list): The tags you want applied.
Basic Usage:
>>> tags = [{'Key': 'env', 'Value': 'development'}]
>>> make_tags_in_proper_format(tags)
{
"env": "development",
}
Returns:
Dict
"""
formatted_tags = dict()
for tag in tags:
formatted_tags[tag.get('Key')] = tag.get('Value')
return formatted_tags
def make_tags_in_aws_format(tags):
"""Take a dictionary of tags and convert them into the AWS Tags format.
Args:
tags (dict): The tags you want applied.
Basic Usage:
>>> tags = {'env': 'development', 'service': 'web'}
>>> make_tags_in_proper_format(tags)
[
{
"Value": "web",
"Key": "service"
},
{
"Value": "development",
"key": "env"
}
]
Returns:
List
"""
formatted_tags = list()
for key, val in tags.items():
formatted_tags.append({
'Key': key,
'Value': val
})
return formatted_tags
def get_tags(client, stream_name, check_mode=False):
"""Retrieve the tags for a Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
stream_name (str): Name of the Kinesis stream.
Kwargs:
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
>> get_tags(client, stream_name)
Returns:
Tuple (bool, str, dict)
"""
err_msg = ''
success = False
params = {
'StreamName': stream_name,
}
results = dict()
try:
if not check_mode:
results = (
client.list_tags_for_stream(**params)['Tags']
)
else:
results = [
{
'Key': 'DryRunMode',
'Value': 'true'
},
]
success = True
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
return success, err_msg, results
def find_stream(client, stream_name, check_mode=False):
"""Retrieve a Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
stream_name (str): Name of the Kinesis stream.
Kwargs:
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
Returns:
Tuple (bool, str, dict)
"""
err_msg = ''
success = False
params = {
'StreamName': stream_name,
}
results = dict()
has_more_shards = True
shards = list()
try:
if not check_mode:
while has_more_shards:
results = (
client.describe_stream(**params)['StreamDescription']
)
shards.extend(results.pop('Shards'))
has_more_shards = results['HasMoreShards']
results['Shards'] = shards
num_closed_shards = len([s for s in shards if 'EndingSequenceNumber' in s['SequenceNumberRange']])
results['OpenShardsCount'] = len(shards) - num_closed_shards
results['ClosedShardsCount'] = num_closed_shards
results['ShardsCount'] = len(shards)
else:
results = {
'OpenShardsCount': 5,
'ClosedShardsCount': 0,
'ShardsCount': 5,
'HasMoreShards': True,
'RetentionPeriodHours': 24,
'StreamName': stream_name,
'StreamARN': 'arn:aws:kinesis:east-side:123456789:stream/{0}'.format(stream_name),
'StreamStatus': 'ACTIVE',
'EncryptionType': 'NONE'
}
success = True
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
return success, err_msg, results
def wait_for_status(client, stream_name, status, wait_timeout=300,
check_mode=False):
"""Wait for the status to change for a Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client
stream_name (str): The name of the kinesis stream.
status (str): The status to wait for.
examples. status=available, status=deleted
Kwargs:
wait_timeout (int): Number of seconds to wait, until this timeout is reached.
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
>>> wait_for_status(client, stream_name, 'ACTIVE', 300)
Returns:
Tuple (bool, str, dict)
"""
polling_increment_secs = 5
wait_timeout = time.time() + wait_timeout
status_achieved = False
stream = dict()
err_msg = ""
while wait_timeout > time.time():
try:
find_success, find_msg, stream = (
find_stream(client, stream_name, check_mode=check_mode)
)
if check_mode:
status_achieved = True
break
elif status != 'DELETING':
if find_success and stream:
if stream.get('StreamStatus') == status:
status_achieved = True
break
else:
if not find_success:
status_achieved = True
break
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
time.sleep(polling_increment_secs)
if not status_achieved:
err_msg = "Wait time out reached, while waiting for results"
else:
err_msg = "Status {0} achieved successfully".format(status)
return status_achieved, err_msg, stream
def tags_action(client, stream_name, tags, action='create', check_mode=False):
"""Create or delete multiple tags from a Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
resource_id (str): The Amazon resource id.
tags (list): List of dictionaries.
examples.. [{Name: "", Values: [""]}]
Kwargs:
action (str): The action to perform.
valid actions == create and delete
default=create
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('ec2')
>>> resource_id = 'pcx-123345678'
>>> tags = {'env': 'development'}
>>> update_tags(client, resource_id, tags)
[True, '']
Returns:
List (bool, str)
"""
success = False
err_msg = ""
params = {'StreamName': stream_name}
try:
if not check_mode:
if action == 'create':
params['Tags'] = tags
client.add_tags_to_stream(**params)
success = True
elif action == 'delete':
params['TagKeys'] = list(tags)
client.remove_tags_from_stream(**params)
success = True
else:
err_msg = 'Invalid action {0}'.format(action)
else:
if action == 'create':
success = True
elif action == 'delete':
success = True
else:
err_msg = 'Invalid action {0}'.format(action)
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
return success, err_msg
def recreate_tags_from_list(list_of_tags):
"""Recreate tags from a list of tuples into the Amazon Tag format.
Args:
list_of_tags (list): List of tuples.
Basic Usage:
>>> list_of_tags = [('Env', 'Development')]
>>> recreate_tags_from_list(list_of_tags)
[
{
"Value": "Development",
"Key": "Env"
}
]
Returns:
List
"""
tags = list()
i = 0
for i in range(len(list_of_tags)):
key_name = list_of_tags[i][0]
key_val = list_of_tags[i][1]
tags.append(
{
'Key': key_name,
'Value': key_val
}
)
return tags
def update_tags(client, stream_name, tags, check_mode=False):
"""Update tags for an amazon resource.
Args:
resource_id (str): The Amazon resource id.
tags (dict): Dictionary of tags you want applied to the Kinesis stream.
Kwargs:
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('ec2')
>>> stream_name = 'test-stream'
>>> tags = {'env': 'development'}
>>> update_tags(client, stream_name, tags)
[True, '']
Return:
Tuple (bool, str)
"""
success = False
changed = False
err_msg = ''
tag_success, tag_msg, current_tags = (
get_tags(client, stream_name, check_mode=check_mode)
)
if current_tags:
tags = make_tags_in_aws_format(tags)
current_tags_set = (
set(
reduce(
lambda x, y: x + y,
[make_tags_in_proper_format(current_tags).items()]
)
)
)
new_tags_set = (
set(
reduce(
lambda x, y: x + y,
[make_tags_in_proper_format(tags).items()]
)
)
)
tags_to_delete = list(current_tags_set.difference(new_tags_set))
tags_to_update = list(new_tags_set.difference(current_tags_set))
if tags_to_delete:
tags_to_delete = make_tags_in_proper_format(
recreate_tags_from_list(tags_to_delete)
)
delete_success, delete_msg = (
tags_action(
client, stream_name, tags_to_delete, action='delete',
check_mode=check_mode
)
)
if not delete_success:
return delete_success, changed, delete_msg
if tags_to_update:
tags = make_tags_in_proper_format(
recreate_tags_from_list(tags_to_update)
)
else:
return True, changed, 'Tags do not need to be updated'
if tags:
create_success, create_msg = (
tags_action(
client, stream_name, tags, action='create',
check_mode=check_mode
)
)
if create_success:
changed = True
return create_success, changed, create_msg
return success, changed, err_msg
def stream_action(client, stream_name, shard_count=1, action='create',
timeout=300, check_mode=False):
"""Create or Delete an Amazon Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
stream_name (str): The name of the kinesis stream.
Kwargs:
shard_count (int): Number of shards this stream will use.
action (str): The action to perform.
valid actions == create and delete
default=create
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
>>> shard_count = 20
>>> stream_action(client, stream_name, shard_count, action='create')
Returns:
List (bool, str)
"""
success = False
err_msg = ''
params = {
'StreamName': stream_name
}
try:
if not check_mode:
if action == 'create':
params['ShardCount'] = shard_count
client.create_stream(**params)
success = True
elif action == 'delete':
client.delete_stream(**params)
success = True
else:
err_msg = 'Invalid action {0}'.format(action)
else:
if action == 'create':
success = True
elif action == 'delete':
success = True
else:
err_msg = 'Invalid action {0}'.format(action)
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
return success, err_msg
def stream_encryption_action(client, stream_name, action='start_encryption', encryption_type='', key_id='',
timeout=300, check_mode=False):
"""Create, Encrypt or Delete an Amazon Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
stream_name (str): The name of the kinesis stream.
Kwargs:
shard_count (int): Number of shards this stream will use.
action (str): The action to perform.
valid actions == create and delete
default=create
encryption_type (str): NONE or KMS
key_id (str): The GUID or alias for the KMS key
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
>>> shard_count = 20
>>> stream_action(client, stream_name, shard_count, action='create', encryption_type='KMS',key_id='alias/aws')
Returns:
List (bool, str)
"""
success = False
err_msg = ''
params = {
'StreamName': stream_name
}
try:
if not check_mode:
if action == 'start_encryption':
params['EncryptionType'] = encryption_type
params['KeyId'] = key_id
client.start_stream_encryption(**params)
success = True
elif action == 'stop_encryption':
params['EncryptionType'] = encryption_type
params['KeyId'] = key_id
client.stop_stream_encryption(**params)
success = True
else:
err_msg = 'Invalid encryption action {0}'.format(action)
else:
if action == 'start_encryption':
success = True
elif action == 'stop_encryption':
success = True
else:
err_msg = 'Invalid encryption action {0}'.format(action)
except botocore.exceptions.ClientError as e:
err_msg = to_native(e)
return success, err_msg
def retention_action(client, stream_name, retention_period=24,
                     action='increase', check_mode=False):
    """Increase or decrease the retention period of a Kinesis stream.

    Args:
        client (botocore.client.Kinesis): Boto3 client.
        stream_name (str): The name of the kinesis stream.

    Kwargs:
        retention_period (int): How long (in hours) records are kept before
            they are discarded. This can not be less than 24 hours.
            default=24
        action (str): The action to perform.
            valid actions == increase and decrease
            default=increase
        check_mode (bool): When True no AWS API call is made.
            default=False

    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> retention_period = 48
        >>> retention_action(client, stream_name, retention_period, action='increase')

    Returns:
        Tuple (bool, str)
    """
    success = False
    err_msg = ''
    params = {
        'StreamName': stream_name
    }
    try:
        if not check_mode:
            if action == 'increase':
                params['RetentionPeriodHours'] = retention_period
                client.increase_stream_retention_period(**params)
                success = True
                err_msg = (
                    'Retention Period increased successfully to {0}'.format(retention_period)
                )
            elif action == 'decrease':
                params['RetentionPeriodHours'] = retention_period
                client.decrease_stream_retention_period(**params)
                success = True
                err_msg = (
                    'Retention Period decreased successfully to {0}'.format(retention_period)
                )
            else:
                err_msg = 'Invalid action {0}'.format(action)
        else:
            # check mode: validate the action without touching AWS.
            if action in ('increase', 'decrease'):
                success = True
            else:
                err_msg = 'Invalid action {0}'.format(action)
    except botocore.exceptions.ClientError as e:
        err_msg = to_native(e)
    return success, err_msg
def update_shard_count(client, stream_name, number_of_shards=1, check_mode=False):
    """Increase or decrease the number of shards in the Kinesis stream.

    Args:
        client (botocore.client.Kinesis): Boto3 client.
        stream_name (str): The name of the kinesis stream.

    Kwargs:
        number_of_shards (int): Target number of shards for this stream.
            default=1
        check_mode (bool): When True no AWS API call is made.
            default=False

    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> number_of_shards = 3
        >>> update_shard_count(client, stream_name, number_of_shards)

    Returns:
        Tuple (bool, str)
    """
    success = True
    err_msg = ''
    params = {
        'StreamName': stream_name,
        'ScalingType': 'UNIFORM_SCALING'
    }
    if not check_mode:
        params['TargetShardCount'] = number_of_shards
        try:
            client.update_shard_count(**params)
        except botocore.exceptions.ClientError as e:
            # Use to_native(e) for consistency with the other actions in
            # this module (was str(e)).
            return False, to_native(e)
    return success, err_msg
def update(client, current_stream, stream_name, number_of_shards=1, retention_period=None,
           tags=None, wait=False, wait_timeout=300, check_mode=False):
    """Update an Amazon Kinesis Stream.
    Args:
        client (botocore.client.EC2): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        number_of_shards (int): Number of shards this stream will use.
            default=1
        retention_period (int): This is how long messages will be kept before
            they are discarded. This can not be less than 24 hours.
        tags (dict): The tags you want applied.
        wait (bool): Wait until Stream is ACTIVE.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> current_stream = {
            'ShardCount': 3,
            'HasMoreShards': True,
            'RetentionPeriodHours': 24,
            'StreamName': 'test-stream',
            'StreamARN': 'arn:aws:kinesis:us-west-2:123456789:stream/test-stream',
            'StreamStatus': 'ACTIVE'
        }
        >>> stream_name = 'test-stream'
        >>> retention_period = 48
        >>> number_of_shards = 10
        >>> update(client, current_stream, stream_name,
                   number_of_shards, retention_period )
    Returns:
        Tuple (bool, bool, str)
    """
    success = True
    changed = False
    err_msg = ''
    # --- Stage 1: retention period ------------------------------------
    # Retention can only be changed while the stream is ACTIVE, so
    # optionally wait for that state first.
    if retention_period:
        if wait:
            wait_success, wait_msg, current_stream = (
                wait_for_status(
                    client, stream_name, 'ACTIVE', wait_timeout,
                    check_mode=check_mode
                )
            )
            if not wait_success:
                return wait_success, False, wait_msg
        if current_stream.get('StreamStatus') == 'ACTIVE':
            retention_changed = False
            # Kinesis has separate increase/decrease APIs; pick by comparison
            # against the currently reported retention.
            if retention_period > current_stream['RetentionPeriodHours']:
                retention_changed, retention_msg = (
                    retention_action(
                        client, stream_name, retention_period, action='increase',
                        check_mode=check_mode
                    )
                )
            elif retention_period < current_stream['RetentionPeriodHours']:
                retention_changed, retention_msg = (
                    retention_action(
                        client, stream_name, retention_period, action='decrease',
                        check_mode=check_mode
                    )
                )
            elif retention_period == current_stream['RetentionPeriodHours']:
                retention_msg = (
                    'Retention {0} is the same as {1}'
                    .format(
                        retention_period,
                        current_stream['RetentionPeriodHours']
                    )
                )
                success = True
            if retention_changed:
                success = True
                changed = True
            err_msg = retention_msg
            # Either block until the retention change settles, or report
            # that it is still in flight.
            if changed and wait:
                wait_success, wait_msg, current_stream = (
                    wait_for_status(
                        client, stream_name, 'ACTIVE', wait_timeout,
                        check_mode=check_mode
                    )
                )
                if not wait_success:
                    return wait_success, False, wait_msg
            elif changed and not wait:
                stream_found, stream_msg, current_stream = (
                    find_stream(client, stream_name, check_mode=check_mode)
                )
                if stream_found:
                    if current_stream['StreamStatus'] != 'ACTIVE':
                        err_msg = (
                            'Retention Period for {0} is in the process of updating'
                            .format(stream_name)
                        )
                        return success, changed, err_msg
        else:
            err_msg = (
                'StreamStatus has to be ACTIVE in order to modify the retention period. Current status is {0}'
                .format(current_stream.get('StreamStatus', 'UNKNOWN'))
            )
            return success, changed, err_msg
    # --- Stage 2: shard count ------------------------------------------
    if current_stream['OpenShardsCount'] != number_of_shards:
        success, err_msg = (
            update_shard_count(client, stream_name, number_of_shards, check_mode=check_mode)
        )
        if not success:
            return success, changed, err_msg
        changed = True
        if wait:
            wait_success, wait_msg, current_stream = (
                wait_for_status(
                    client, stream_name, 'ACTIVE', wait_timeout,
                    check_mode=check_mode
                )
            )
            if not wait_success:
                return wait_success, changed, wait_msg
        else:
            stream_found, stream_msg, current_stream = (
                find_stream(client, stream_name, check_mode=check_mode)
            )
            if stream_found and current_stream['StreamStatus'] != 'ACTIVE':
                err_msg = (
                    'Number of shards for {0} is in the process of updating'
                    .format(stream_name)
                )
                return success, changed, err_msg
    # --- Stage 3: tags ---------------------------------------------------
    # NOTE(review): tag_success/tag_changed are not folded into
    # success/changed here — presumably intentional best-effort tagging;
    # confirm before changing.
    if tags:
        tag_success, tag_changed, err_msg = (
            update_tags(client, stream_name, tags, check_mode=check_mode)
        )
    if wait:
        success, err_msg, status_stream = (
            wait_for_status(
                client, stream_name, 'ACTIVE', wait_timeout,
                check_mode=check_mode
            )
        )
    if success and changed:
        err_msg = 'Kinesis Stream {0} updated successfully.'.format(stream_name)
    elif success and not changed:
        err_msg = 'Kinesis Stream {0} did not change.'.format(stream_name)
    return success, changed, err_msg
def create_stream(client, stream_name, number_of_shards=1, retention_period=None,
tags=None, wait=False, wait_timeout=300, check_mode=False):
"""Create an Amazon Kinesis Stream.
Args:
client (botocore.client.EC2): Boto3 client.
stream_name (str): The name of the kinesis stream.
Kwargs:
number_of_shards (int): Number of shards this stream will use.
default=1
retention_period (int): Can not be less than 24 hours
default=None
tags (dict): The tags you want applied.
default=None
wait (bool): Wait until Stream is ACTIVE.
default=False
wait_timeout (int): How long to wait until this operation is considered failed.
default=300
check_mode (bool): This will pass DryRun as one of the parameters to the aws api.
default=False
Basic Usage:
>>> client = boto3.client('kinesis')
>>> stream_name = 'test-stream'
>>> number_of_shards = 10
>>> tags = {'env': 'test'}
>>> create_stream(client, stream_name, number_of_shards, tags=tags)
Returns:
Tuple (bool, bool, str, dict)
"""
success = False
changed = False
err_msg = ''
results = dict()<|fim▁hole|> )
if stream_found and current_stream.get('StreamStatus') == 'DELETING' and wait:
wait_success, wait_msg, current_stream = (
wait_for_status(
client, stream_name, 'ACTIVE', wait_timeout,
check_mode=check_mode
)
)
if stream_found and current_stream.get('StreamStatus') != 'DELETING':
success, changed, err_msg = update(
client, current_stream, stream_name, number_of_shards,
retention_period, tags, wait, wait_timeout, check_mode=check_mode
)
else:
create_success, create_msg = (
stream_action(
client, stream_name, number_of_shards, action='create',
check_mode=check_mode
)
)
if not create_success:
changed = True
err_msg = 'Failed to create Kinesis stream: {0}'.format(create_msg)
return False, True, err_msg, {}
else:
changed = True
if wait:
wait_success, wait_msg, results = (
wait_for_status(
client, stream_name, 'ACTIVE', wait_timeout,
check_mode=check_mode
)
)
err_msg = (
'Kinesis Stream {0} is in the process of being created'
.format(stream_name)
)
if not wait_success:
return wait_success, True, wait_msg, results
else:
err_msg = (
'Kinesis Stream {0} created successfully'
.format(stream_name)
)
if tags:
changed, err_msg = (
tags_action(
client, stream_name, tags, action='create',
check_mode=check_mode
)
)
if changed:
success = True
if not success:
return success, changed, err_msg, results
stream_found, stream_msg, current_stream = (
find_stream(client, stream_name, check_mode=check_mode)
)
if retention_period and current_stream.get('StreamStatus') == 'ACTIVE':
changed, err_msg = (
retention_action(
client, stream_name, retention_period, action='increase',
check_mode=check_mode
)
)
if changed:
success = True
if not success:
return success, changed, err_msg, results
else:
err_msg = (
'StreamStatus has to be ACTIVE in order to modify the retention period. Current status is {0}'
.format(current_stream.get('StreamStatus', 'UNKNOWN'))
)
success = create_success
changed = True
if success:
stream_found, stream_msg, results = (
find_stream(client, stream_name, check_mode=check_mode)
)
tag_success, tag_msg, current_tags = (
get_tags(client, stream_name, check_mode=check_mode)
)
if current_tags and not check_mode:
current_tags = make_tags_in_proper_format(current_tags)
results['Tags'] = current_tags
elif check_mode and tags:
results['Tags'] = tags
else:
results['Tags'] = dict()
results = convert_to_lower(results)
return success, changed, err_msg, results
def delete_stream(client, stream_name, wait=False, wait_timeout=300,
                  check_mode=False):
    """Delete an Amazon Kinesis Stream.

    Args:
        client (botocore.client.Kinesis): Boto3 client.
        stream_name (str): The name of the kinesis stream.

    Kwargs:
        wait (bool): Block until the stream reaches the DELETING state.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): When True no AWS API call is made.
            default=False

    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> delete_stream(client, 'test-stream')

    Returns:
        Tuple (bool, bool, str, dict)
    """
    results = dict()
    stream_found, stream_msg, current_stream = (
        find_stream(client, stream_name, check_mode=check_mode)
    )
    if not stream_found:
        # Deleting a non-existent stream is a successful no-op.
        return True, False, 'Stream {0} does not exist'.format(stream_name), results

    success, err_msg = (
        stream_action(
            client, stream_name, action='delete', check_mode=check_mode
        )
    )
    changed = success
    if success:
        if wait:
            success, err_msg, results = (
                wait_for_status(
                    client, stream_name, 'DELETING', wait_timeout,
                    check_mode=check_mode
                )
            )
            err_msg = 'Stream {0} deleted successfully'.format(stream_name)
            if not success:
                return success, True, err_msg, results
        else:
            err_msg = (
                'Stream {0} is in the process of being deleted'
                .format(stream_name)
            )
    return success, changed, err_msg, results
def start_stream_encryption(client, stream_name, encryption_type='', key_id='',
                            wait=False, wait_timeout=300, check_mode=False):
    """Start encryption on an Amazon Kinesis Stream.
    Args:
        client (botocore.client.Kinesis): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        encryption_type (str): KMS or NONE
        key_id (str): KMS key GUID or alias
        wait (bool): Wait until Stream is ACTIVE.
            default=False
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): When True no AWS API call is made.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> key_id = 'alias/aws'
        >>> encryption_type = 'KMS'
        >>> start_stream_encryption(client, stream_name,encryption_type,key_id)
    Returns:
        Tuple (bool, bool, str, dict)
    """
    success = False
    changed = False
    err_msg = ''
    # Removed an unused local 'params' dict; stream_encryption_action
    # builds the API request itself.
    results = dict()
    stream_found, stream_msg, current_stream = (
        find_stream(client, stream_name, check_mode=check_mode)
    )
    if stream_found:
        success, err_msg = (
            stream_encryption_action(
                client, stream_name, action='start_encryption', encryption_type=encryption_type, key_id=key_id, check_mode=check_mode
            )
        )
        if success:
            changed = True
            if wait:
                success, err_msg, results = (
                    wait_for_status(
                        client, stream_name, 'ACTIVE', wait_timeout,
                        check_mode=check_mode
                    )
                )
                err_msg = 'Kinesis Stream {0} encryption started successfully.'.format(stream_name)
                if not success:
                    return success, True, err_msg, results
            else:
                err_msg = (
                    'Kinesis Stream {0} is in the process of starting encryption.'.format(stream_name)
                )
    else:
        # Cannot encrypt a stream that does not exist; treat as a no-op.
        success = True
        changed = False
        err_msg = 'Kinesis Stream {0} does not exist'.format(stream_name)
    return success, changed, err_msg, results
def stop_stream_encryption(client, stream_name, encryption_type='', key_id='',
                           wait=True, wait_timeout=300, check_mode=False):
    """Stop encryption on an Amazon Kinesis Stream.
    Args:
        client (botocore.client.Kinesis): Boto3 client.
        stream_name (str): The name of the kinesis stream.
    Kwargs:
        encryption_type (str): KMS or NONE
        key_id (str): KMS key GUID or alias
        wait (bool): Wait until Stream is ACTIVE.
            default=True
        wait_timeout (int): How long to wait until this operation is considered failed.
            default=300
        check_mode (bool): When True no AWS API call is made.
            default=False
    Basic Usage:
        >>> client = boto3.client('kinesis')
        >>> stream_name = 'test-stream'
        >>> stop_stream_encryption(client, stream_name, encryption_type, key_id)
    Returns:
        Tuple (bool, bool, str, dict)
    """
    success = False
    changed = False
    err_msg = ''
    # Removed an unused local 'params' dict; stream_encryption_action
    # builds the API request itself.
    results = dict()
    stream_found, stream_msg, current_stream = (
        find_stream(client, stream_name, check_mode=check_mode)
    )
    if stream_found:
        if current_stream.get('EncryptionType') == 'KMS':
            success, err_msg = (
                stream_encryption_action(
                    client, stream_name, action='stop_encryption', key_id=key_id, encryption_type=encryption_type, check_mode=check_mode
                )
            )
        elif current_stream.get('EncryptionType') == 'NONE':
            # Already unencrypted.
            # NOTE(review): this still reports changed=True below even though
            # nothing was modified — confirm whether that is intentional.
            success = True
        if success:
            changed = True
            if wait:
                success, err_msg, results = (
                    wait_for_status(
                        client, stream_name, 'ACTIVE', wait_timeout,
                        check_mode=check_mode
                    )
                )
                err_msg = 'Kinesis Stream {0} encryption stopped successfully.'.format(stream_name)
                if not success:
                    return success, True, err_msg, results
            else:
                err_msg = (
                    'Stream {0} is in the process of stopping encryption.'.format(stream_name)
                )
    else:
        success = True
        changed = False
        err_msg = 'Stream {0} does not exist.'.format(stream_name)
    return success, changed, err_msg, results
def main():
    """Ansible module entry point: create/update/delete a Kinesis stream.

    Parses module arguments, builds a boto3 kinesis client, dispatches to
    create_stream/delete_stream (and optionally the encryption helpers),
    then reports the outcome via exit_json/fail_json.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            name=dict(required=True),
            shards=dict(default=None, required=False, type='int'),
            retention_period=dict(default=None, required=False, type='int'),
            tags=dict(default=None, required=False, type='dict', aliases=['resource_tags']),
            wait=dict(default=True, required=False, type='bool'),
            wait_timeout=dict(default=300, required=False, type='int'),
            state=dict(default='present', choices=['present', 'absent']),
            encryption_type=dict(required=False, choices=['NONE', 'KMS']),
            key_id=dict(required=False, type='str'),
            encryption_state=dict(required=False, choices=['enabled', 'disabled']),
        )
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    retention_period = module.params.get('retention_period')
    stream_name = module.params.get('name')
    shards = module.params.get('shards')
    state = module.params.get('state')
    tags = module.params.get('tags')
    wait = module.params.get('wait')
    wait_timeout = module.params.get('wait_timeout')
    encryption_type = module.params.get('encryption_type')
    key_id = module.params.get('key_id')
    encryption_state = module.params.get('encryption_state')
    # Argument validation that cannot be expressed in argument_spec.
    if state == 'present' and not shards:
        module.fail_json(msg='Shards is required when state == present.')
    if retention_period:
        if retention_period < 24:
            module.fail_json(msg='Retention period can not be less than 24 hours.')
    if not HAS_BOTO3:
        module.fail_json(msg='boto3 is required.')
    check_mode = module.check_mode
    try:
        region, ec2_url, aws_connect_kwargs = (
            get_aws_connection_info(module, boto3=True)
        )
        client = (
            boto3_conn(
                module, conn_type='client', resource='kinesis',
                region=region, endpoint=ec2_url, **aws_connect_kwargs
            )
        )
    except botocore.exceptions.ClientError as e:
        # NOTE(review): botocore ClientError does not document a '.msg'
        # attribute; this may raise AttributeError instead of reporting the
        # original error — verify against the botocore version in use.
        err_msg = 'Boto3 Client Error - {0}'.format(to_native(e.msg))
        module.fail_json(
            success=False, changed=False, result={}, msg=err_msg
        )
    if state == 'present':
        success, changed, err_msg, results = (
            create_stream(
                client, stream_name, shards, retention_period, tags,
                wait, wait_timeout, check_mode
            )
        )
        # NOTE(review): when encryption_state is set, the create/update
        # results above are overwritten by the encryption call's results —
        # confirm this is the intended reporting behavior.
        if encryption_state == 'enabled':
            success, changed, err_msg, results = (
                start_stream_encryption(
                    client, stream_name, encryption_type, key_id, wait, wait_timeout, check_mode
                )
            )
        elif encryption_state == 'disabled':
            success, changed, err_msg, results = (
                stop_stream_encryption(
                    client, stream_name, encryption_type, key_id, wait, wait_timeout, check_mode
                )
            )
    elif state == 'absent':
        success, changed, err_msg, results = (
            delete_stream(client, stream_name, wait, wait_timeout, check_mode)
        )
    if success:
        module.exit_json(
            success=success, changed=changed, msg=err_msg, **results
        )
    else:
        module.fail_json(
            success=success, changed=changed, msg=err_msg, result=results
        )
# Standard Ansible module entry point: run only when executed directly.
if __name__ == '__main__':
    main()
stream_found, stream_msg, current_stream = (
find_stream(client, stream_name, check_mode=check_mode) |
<|file_name|>baidumap.js<|end_file_name|><|fim▁begin|>/*******************************************************************************
* KindEditor - WYSIWYG HTML Editor for Internet
* Copyright (C) 2006-2011 kindsoft.net
*
* @author Roddy <[email protected]>
* @site http://www.kindsoft.net/
* @licence http://www.kindsoft.net/license.php
*******************************************************************************/
// Baidu Maps: http://dev.baidu.com/wiki/map/index.php?title=%E9%A6%96%E9%A1%B5
KindEditor.plugin('baidumap', function (K) {
var self = this, name = 'baidumap', lang = self.lang(name + '.');
var mapWidth = K.undef(self.mapWidth, 558);
var mapHeight = K.undef(self.mapHeight, 360);
self.clickToolbar(name, function () {
var html = ['<div style="padding:10px 20px;">',
'<div class="ke-header">',
// left start
'<div class="ke-left">',
lang.address + ' <input id="kindeditor_plugin_map_address" name="address" class="ke-input-text" value="" style="width:200px;" /> ',
'<span class="ke-button-common ke-button-outer">',
'<input type="button" name="searchBtn" class="ke-button-common ke-button" value="' + lang.search + '" />',
'</span>',
'</div>',
// right start
'<div class="ke-right">',
'<input type="checkbox" id="keInsertDynamicMap" name="insertDynamicMap" value="1" /> <label for="keInsertDynamicMap">' + lang.insertDynamicMap + '</label>',
'</div>',
'<div class="ke-clearfix"></div>',
'</div>',
'<div class="ke-map" style="width:' + mapWidth + 'px;height:' + mapHeight + 'px;"></div>',
'</div>'].join('');
var dialog = self.createDialog({
name: name,
width: mapWidth + 42,
title: self.lang(name),
body: html,
yesBtn: {
name: self.lang('yes'),
click: function (e) {
var map = win.map;
var centerObj = map.getCenter();
var center = centerObj.lng + ',' + centerObj.lat;
var zoom = map.getZoom();
var url = [checkbox[0].checked ? self.pluginsPath + 'baidumap/index.html' : 'http://api.map.baidu.com/staticimage',
'?center=' + encodeURIComponent(center),
'&zoom=' + encodeURIComponent(zoom),
'&width=' + mapWidth,
'&height=' + mapHeight,
'&markers=' + encodeURIComponent(center),
'&markerStyles=' + encodeURIComponent('l,A')].join('');
if (checkbox[0].checked) {
self.insertHtml('<iframe src="' + url + '" frameborder="0" style="width:' + (mapWidth + 2) + 'px;height:' + (mapHeight + 2) + 'px;"></iframe>');
} else {
self.exec('insertimage', url);
}
self.hideDialog().focus();
}
},
beforeRemove: function () {
searchBtn.remove();
if (doc) {
doc.write('');
}
iframe.remove();
}
});<|fim▁hole|> var div = dialog.div,
addressBox = K('[name="address"]', div),
searchBtn = K('[name="searchBtn"]', div),
checkbox = K('[name="insertDynamicMap"]', dialog.div),
win, doc;
var iframe = K('<iframe class="ke-textarea" frameborder="0" src="' + self.pluginsPath + 'baidumap/map.html" style="width:' + mapWidth + 'px;height:' + mapHeight + 'px;"></iframe>');
function ready() {
win = iframe[0].contentWindow;
doc = K.iframeDoc(iframe);
}
iframe.bind('load', function () {
iframe.unbind('load');
if (K.IE) {
ready();
} else {
setTimeout(ready, 0);
}
});
K('.ke-map', div).replaceWith(iframe);
// search map
searchBtn.click(function () {
win.search(addressBox.val());
});
});
});<|fim▁end|> | |
<|file_name|>test_l3_agent.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import functools
import os.path
import mock
import netaddr
from oslo_config import cfg
from oslo_log import log as logging
import testtools
import webob
import webob.dec
import webob.exc
from neutron.agent.common import config as agent_config
from neutron.agent.common import ovs_lib
from neutron.agent.l3 import agent as neutron_l3_agent
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespace_manager
from neutron.agent.l3 import namespaces
from neutron.agent import l3_agent as l3_agent_main
from neutron.agent.linux import dhcp
from neutron.agent.linux import external_process
from neutron.agent.linux import ip_lib
from neutron.agent.linux import utils
from neutron.callbacks import events
from neutron.callbacks import manager
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.common import config as common_config
from neutron.common import constants as l3_constants
from neutron.common import utils as common_utils
from neutron.openstack.common import uuidutils
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.linux import base
from neutron.tests.functional.agent.linux import helpers
from neutron.tests.unit.agent.l3 import test_agent as test_l3_agent
LOG = logging.getLogger(__name__)
_uuid = uuidutils.generate_uuid
METADATA_REQUEST_TIMEOUT = 60
def get_ovs_bridge(br_name):
    """Return an OVSBridge wrapper for the named Open vSwitch bridge."""
    return ovs_lib.OVSBridge(br_name)
class L3AgentTestFramework(base.BaseLinuxTestCase):
    """Base framework for functional L3 agent tests.

    Sets up a real L3 agent (with the plugin RPC API mocked out) backed by
    freshly created OVS bridges and a temporary state directory, and
    provides helpers to create/delete routers and assert on the resulting
    namespaces, devices, iptables chains and keepalived configuration.
    """

    def setUp(self):
        super(L3AgentTestFramework, self).setUp()
        # The agent must never talk to a real Neutron server in these tests.
        mock.patch('neutron.agent.l3.agent.L3PluginApi').start()

        # TODO(pcm): Move this to BaseTestCase, if we find that more tests
        # use this mechanism.
        self._callback_manager = manager.CallbacksManager()
        mock.patch.object(registry, '_get_callback_manager',
                          return_value=self._callback_manager).start()

        self.agent = self._configure_agent('agent1')

    def _get_config_opts(self):
        """Build a fresh, isolated ConfigOpts with the core options registered."""
        config = cfg.ConfigOpts()
        config.register_opts(common_config.core_opts)
        config.register_opts(common_config.core_cli_opts)
        logging.register_options(config)
        agent_config.register_process_monitor_opts(config)
        return config

    def _configure_agent(self, host):
        """Create an L3 agent for `host` wired to per-test bridges and paths."""
        conf = self._get_config_opts()
        l3_agent_main.register_opts(conf)
        cfg.CONF.set_override('debug', False)
        agent_config.setup_logging()
        conf.set_override(
            'interface_driver',
            'neutron.agent.linux.interface.OVSInterfaceDriver')
        conf.set_override('router_delete_namespaces', True)

        # Dedicated integration/external bridges so tests do not interfere.
        br_int = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
        br_ex = self.useFixture(net_helpers.OVSBridgeFixture()).bridge
        conf.set_override('ovs_integration_bridge', br_int.br_name)
        conf.set_override('external_network_bridge', br_ex.br_name)

        # All agent state (sockets, pids, HA confs) lives under a temp dir.
        temp_dir = self.get_new_temp_dir()
        get_temp_file_path = functools.partial(self.get_temp_file_path,
                                               root=temp_dir)
        conf.set_override('state_path', temp_dir.path)
        conf.set_override('metadata_proxy_socket',
                          get_temp_file_path('metadata_proxy'))
        conf.set_override('ha_confs_path',
                          get_temp_file_path('ha_confs'))
        conf.set_override('external_pids',
                          get_temp_file_path('external/pids'))
        conf.set_override('host', host)
        agent = neutron_l3_agent.L3NATAgentWithStateReport(host, conf)
        # Avoid real gratuitous ARP traffic during tests.
        mock.patch.object(ip_lib, '_arping').start()

        return agent

    def generate_router_info(self, enable_ha, ip_version=4, extra_routes=True,
                             enable_fip=True, enable_snat=True,
                             dual_stack=False, v6_ext_gw_with_sub=True):
        """Return router data suitable for feeding to the agent under test."""
        if ip_version == 6 and not dual_stack:
            # Pure IPv6 routers have no SNAT/FIP/extra-route support here.
            enable_snat = False
            enable_fip = False
            extra_routes = False
            if not v6_ext_gw_with_sub:
                self.agent.conf.set_override('ipv6_gateway',
                                             'fe80::f816:3eff:fe2e:1')

        return test_l3_agent.prepare_router_data(ip_version=ip_version,
                                                 enable_snat=enable_snat,
                                                 enable_floating_ip=enable_fip,
                                                 enable_ha=enable_ha,
                                                 extra_routes=extra_routes,
                                                 dual_stack=dual_stack,
                                                 v6_ext_gw_with_sub=(
                                                     v6_ext_gw_with_sub))

    def manage_router(self, agent, router):
        """Create the router on the agent and register its cleanup."""
        self.addCleanup(self._delete_router, agent, router['id'])
        ri = self._create_router(agent, router)
        return ri

    def _create_router(self, agent, router):
        agent._process_added_router(router)
        return agent.router_info[router['id']]

    def _delete_router(self, agent, router_id):
        agent._router_removed(router_id)

    def _add_fip(self, router, fip_address, fixed_address='10.0.0.2',
                 host=None):
        """Append a floating IP entry to the router's data."""
        fip = {'id': _uuid(),
               'port_id': _uuid(),
               'floating_ip_address': fip_address,
               'fixed_ip_address': fixed_address,
               'host': host}
        router.router[l3_constants.FLOATINGIP_KEY].append(fip)

    def _add_internal_interface_by_subnet(self, router, count=1,
                                          ip_version=4,
                                          ipv6_subnet_modes=None,
                                          interface_id=None):
        return test_l3_agent.router_append_subnet(router, count,
                                                  ip_version, ipv6_subnet_modes, interface_id)

    def _namespace_exists(self, namespace):
        ip = ip_lib.IPWrapper(namespace=namespace)
        return ip.netns.exists(namespace)

    def _metadata_proxy_exists(self, conf, router):
        """Return True if a metadata proxy process is running for the router."""
        pm = external_process.ProcessManager(
            conf,
            router.router_id,
            router.ns_name)
        return pm.active

    def device_exists_with_ips_and_mac(self, expected_device, name_getter,
                                       namespace):
        """Check the device for the given port exists with its IPs and MAC."""
        ip_cidrs = common_utils.fixed_ip_cidrs(expected_device['fixed_ips'])
        return ip_lib.device_exists_with_ips_and_mac(
            name_getter(expected_device['id']), ip_cidrs,
            expected_device['mac_address'], namespace)

    @staticmethod
    def _port_first_ip_cidr(port):
        # CIDR of the port's first fixed IP, e.g. '19.4.4.4/24'.
        fixed_ip = port['fixed_ips'][0]
        return common_utils.ip_to_cidr(fixed_ip['ip_address'],
                                       fixed_ip['prefixlen'])

    def get_device_mtu(self, target_device, name_getter, namespace):
        device = ip_lib.IPDevice(name_getter(target_device), namespace)
        return device.link.mtu

    def get_expected_keepalive_configuration(self, router):
        """Render the keepalived config expected for an HA router."""
        router_id = router.router_id
        ha_device_name = router.get_ha_device_name()
        ha_device_cidr = self._port_first_ip_cidr(router.ha_port)
        external_port = router.get_ex_gw_port()
        ex_port_ipv6 = ip_lib.get_ipv6_lladdr(external_port['mac_address'])
        external_device_name = router.get_external_device_name(
            external_port['id'])
        external_device_cidr = self._port_first_ip_cidr(external_port)
        internal_port = router.router[l3_constants.INTERFACE_KEY][0]
        int_port_ipv6 = ip_lib.get_ipv6_lladdr(internal_port['mac_address'])
        internal_device_name = router.get_internal_device_name(
            internal_port['id'])
        internal_device_cidr = self._port_first_ip_cidr(internal_port)
        floating_ip_cidr = common_utils.ip_to_cidr(
            router.get_floating_ips()[0]['floating_ip_address'])
        default_gateway_ip = external_port['subnets'][0].get('gateway_ip')

        return """vrrp_instance VR_1 {
    state BACKUP
    interface %(ha_device_name)s
    virtual_router_id 1
    priority 50
    nopreempt
    advert_int 2
    track_interface {
        %(ha_device_name)s
    }
    virtual_ipaddress {
        169.254.0.1/24 dev %(ha_device_name)s
    }
    virtual_ipaddress_excluded {
        %(floating_ip_cidr)s dev %(external_device_name)s
        %(external_device_cidr)s dev %(external_device_name)s
        %(internal_device_cidr)s dev %(internal_device_name)s
        %(ex_port_ipv6)s dev %(external_device_name)s scope link
        %(int_port_ipv6)s dev %(internal_device_name)s scope link
    }
    virtual_routes {
        0.0.0.0/0 via %(default_gateway_ip)s dev %(external_device_name)s
        8.8.8.0/24 via 19.4.4.4
    }
}""" % {
            'router_id': router_id,
            'ha_device_name': ha_device_name,
            'ha_device_cidr': ha_device_cidr,
            'external_device_name': external_device_name,
            'external_device_cidr': external_device_cidr,
            'internal_device_name': internal_device_name,
            'internal_device_cidr': internal_device_cidr,
            'floating_ip_cidr': floating_ip_cidr,
            'default_gateway_ip': default_gateway_ip,
            'int_port_ipv6': int_port_ipv6,
            'ex_port_ipv6': ex_port_ipv6
        }

    def _get_rule(self, iptables_manager, table, chain, predicate):
        # First rule in the chain matching the predicate; raises
        # StopIteration if none matches.
        rules = iptables_manager.get_chain(table, chain)
        result = next(rule for rule in rules if predicate(rule))
        return result

    def _assert_router_does_not_exist(self, router):
        # If the namespace assertion succeeds
        # then the devices and iptable rules have also been deleted,
        # so there's no need to check that explicitly.
        self.assertFalse(self._namespace_exists(router.ns_name))
        utils.wait_until_true(
            lambda: not self._metadata_proxy_exists(self.agent.conf, router))

    def _assert_snat_chains(self, router):
        self.assertFalse(router.iptables_manager.is_chain_empty(
            'nat', 'snat'))
        self.assertFalse(router.iptables_manager.is_chain_empty(
            'nat', 'POSTROUTING'))

    def _assert_floating_ip_chains(self, router):
        self.assertFalse(router.iptables_manager.is_chain_empty(
            'nat', 'float-snat'))

    def _assert_metadata_chains(self, router):
        # Both the NAT redirect and the filter accept rules must mention
        # the configured metadata port.
        metadata_port_filter = lambda rule: (
            str(self.agent.conf.metadata_port) in rule.rule)
        self.assertTrue(self._get_rule(router.iptables_manager,
                                       'nat',
                                       'PREROUTING',
                                       metadata_port_filter))
        self.assertTrue(self._get_rule(router.iptables_manager,
                                       'filter',
                                       'INPUT',
                                       metadata_port_filter))

    def _assert_internal_devices(self, router):
        internal_devices = router.router[l3_constants.INTERFACE_KEY]
        self.assertTrue(len(internal_devices))
        for device in internal_devices:
            self.assertTrue(self.device_exists_with_ips_and_mac(
                device, router.get_internal_device_name, router.ns_name))

    def _assert_extra_routes(self, router):
        routes = ip_lib.get_routing_table(namespace=router.ns_name)
        routes = [{'nexthop': route['nexthop'],
                   'destination': route['destination']} for route in routes]

        for extra_route in router.router['routes']:
            self.assertIn(extra_route, routes)

    def _assert_interfaces_deleted_from_ovs(self):
        def assert_ovs_bridge_empty(bridge_name):
            bridge = ovs_lib.OVSBridge(bridge_name)
            self.assertFalse(bridge.get_port_name_list())

        assert_ovs_bridge_empty(self.agent.conf.ovs_integration_bridge)
        assert_ovs_bridge_empty(self.agent.conf.external_network_bridge)

    def floating_ips_configured(self, router):
        """Return True if every floating IP is plumbed on the gateway device."""
        floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
        external_port = router.get_ex_gw_port()
        return len(floating_ips) and all(
            ip_lib.device_exists_with_ips_and_mac(
                router.get_external_device_name(external_port['id']),
                ['%s/32' % fip['floating_ip_address']],
                external_port['mac_address'],
                namespace=router.ns_name) for fip in floating_ips)

    def fail_ha_router(self, router):
        """Force an HA failover by downing the router's HA interface."""
        device_name = router.get_ha_device_name()
        ha_device = ip_lib.IPDevice(device_name, router.ns_name)
        ha_device.link.set_down()
class L3AgentTestCase(L3AgentTestFramework):
def test_keepalived_state_change_notification(self):
enqueue_mock = mock.patch.object(
self.agent, 'enqueue_state_change').start()
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router.ha_state == 'master')
self.fail_ha_router(router)
utils.wait_until_true(lambda: router.ha_state == 'backup')
utils.wait_until_true(lambda: enqueue_mock.call_count == 3)
calls = [args[0] for args in enqueue_mock.call_args_list]
self.assertEqual((router.router_id, 'backup'), calls[0])
self.assertEqual((router.router_id, 'master'), calls[1])
self.assertEqual((router.router_id, 'backup'), calls[2])
def _expected_rpc_report(self, expected):
calls = (args[0][1] for args in
self.agent.plugin_rpc.update_ha_routers_states.call_args_list)
# Get the last state reported for each router
actual_router_states = {}
for call in calls:
for router_id, state in call.iteritems():
actual_router_states[router_id] = state
return actual_router_states == expected
def test_keepalived_state_change_bulk_rpc(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self.fail_ha_router(router1)
router_info = self.generate_router_info(enable_ha=True)
router2 = self.manage_router(self.agent, router_info)
utils.wait_until_true(lambda: router1.ha_state == 'backup')
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(
lambda: self._expected_rpc_report(
{router1.router_id: 'standby', router2.router_id: 'active'}))
def test_agent_notifications_for_router_events(self):
"""Test notifications for router create, update, and delete.
Make sure that when the agent sends notifications of router events
for router create, update, and delete, that the correct handler is
called with the right resource, event, and router information.
"""
event_handler = mock.Mock()
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_CREATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_UPDATE)
registry.subscribe(event_handler,
resources.ROUTER, events.BEFORE_DELETE)
registry.subscribe(event_handler,
resources.ROUTER, events.AFTER_DELETE)
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self.agent._process_updated_router(router.router)
self._delete_router(self.agent, router.router_id)
expected_calls = [
mock.call('router', 'before_create', self.agent, router=router),
mock.call('router', 'after_create', self.agent, router=router),
mock.call('router', 'before_update', self.agent, router=router),
mock.call('router', 'after_update', self.agent, router=router),
mock.call('router', 'before_delete', self.agent, router=router),
mock.call('router', 'after_delete', self.agent, router=router)]
event_handler.assert_has_calls(expected_calls)
def test_legacy_router_lifecycle(self):
self._router_lifecycle(enable_ha=False, dual_stack=True)
def test_legacy_router_lifecycle_with_no_gateway_subnet(self):
self._router_lifecycle(enable_ha=False, dual_stack=True,
v6_ext_gw_with_sub=False)
def test_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True)
def test_conntrack_disassociate_fip(self):
'''Test that conntrack immediately drops stateful connection
that uses floating IP once it's disassociated.
'''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
port = helpers.get_free_namespace_port(router.ns_name)
client_address = '19.4.4.3'
server_address = '35.4.0.4'
def clean_fips(router):
router.router[l3_constants.FLOATINGIP_KEY] = []
clean_fips(router)
self._add_fip(router, client_address, fixed_address=server_address)
router.process(self.agent)
router_ns = ip_lib.IPWrapper(namespace=router.ns_name)
netcat = helpers.NetcatTester(router_ns, router_ns,
server_address, port,
client_address=client_address,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
def assert_num_of_conntrack_rules(n):
out = router_ns.netns.execute(["conntrack", "-L",
"--orig-src", client_address])
self.assertEqual(
n, len([line for line in out.strip().split('\n') if line]))
with self.assert_max_execution_time(100):
assert_num_of_conntrack_rules(0)
self.assertTrue(netcat.test_connectivity())
assert_num_of_conntrack_rules(1)
clean_fips(router)
router.process(self.agent)
assert_num_of_conntrack_rules(0)
with testtools.ExpectedException(RuntimeError):
netcat.test_connectivity()
def test_ipv6_ha_router_lifecycle(self):
self._router_lifecycle(enable_ha=True, ip_version=6)
def test_keepalived_configuration(self):
router_info = self.generate_router_info(enable_ha=True)
router = self.manage_router(self.agent, router_info)
expected = self.get_expected_keepalive_configuration(router)
self.assertEqual(expected,
router.keepalived_manager.get_conf_on_disk())
# Add a new FIP and change the GW IP address
router.router = copy.deepcopy(router.router)
existing_fip = '19.4.4.2'
new_fip = '19.4.4.3'
self._add_fip(router, new_fip)
subnet_id = _uuid()
fixed_ips = [{'ip_address': '19.4.4.10',
'prefixlen': 24,
'subnet_id': subnet_id}]
subnets = [{'id': subnet_id,
'cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.5'}]
router.router['gw_port']['subnets'] = subnets
router.router['gw_port']['fixed_ips'] = fixed_ips
router.process(self.agent)
# Get the updated configuration and assert that both FIPs are in,
# and that the GW IP address was updated.
new_config = router.keepalived_manager.config.get_config_str()
old_gw = '0.0.0.0/0 via 19.4.4.1'
new_gw = '0.0.0.0/0 via 19.4.4.5'
old_external_device_ip = '19.4.4.4'
new_external_device_ip = '19.4.4.10'
self.assertIn(existing_fip, new_config)
self.assertIn(new_fip, new_config)
self.assertNotIn(old_gw, new_config)
self.assertIn(new_gw, new_config)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
self.assertNotIn('%s/24 dev %s' %
(old_external_device_ip, external_device_name),
new_config)
self.assertIn('%s/24 dev %s' %
(new_external_device_ip, external_device_name),
new_config)
def test_periodic_sync_routers_task(self):
routers_to_keep = []
routers_to_delete = []
ns_names_to_retrieve = set()
for i in range(2):
routers_to_keep.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_keep[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_keep[i]['id'])
for i in range(2):
routers_to_delete.append(self.generate_router_info(False))
self.manage_router(self.agent, routers_to_delete[i])
ns_names_to_retrieve.add(namespaces.NS_PREFIX +
routers_to_delete[i]['id'])
# Mock the plugin RPC API to Simulate a situation where the agent
# was handling the 4 routers created above, it went down and after
# starting up again, two of the routers were deleted via the API
mocked_get_routers = (
neutron_l3_agent.L3PluginApi.return_value.get_routers)
mocked_get_routers.return_value = routers_to_keep
# Synchonize the agent with the plug-in
with mock.patch.object(namespace_manager.NamespaceManager, 'list_all',
return_value=ns_names_to_retrieve):
self.agent.periodic_sync_routers_task(self.agent.context)
# Mock the plugin RPC API so a known external network id is returned
# when the router updates are processed by the agent
external_network_id = _uuid()
mocked_get_external_network_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_get_external_network_id.return_value = external_network_id
# Plug external_gateway_info in the routers that are not going to be
# deleted by the agent when it processes the updates. Otherwise,
# _process_router_if_compatible in the agent fails
for i in range(2):
routers_to_keep[i]['external_gateway_info'] = {'network_id':
external_network_id}
# Have the agent process the update from the plug-in and verify
# expected behavior
for _ in routers_to_keep + routers_to_delete:
self.agent._process_router_update()
for i in range(2):
self.assertIn(routers_to_keep[i]['id'], self.agent.router_info)
self.assertTrue(self._namespace_exists(namespaces.NS_PREFIX +
routers_to_keep[i]['id']))
for i in range(2):
self.assertNotIn(routers_to_delete[i]['id'],
self.agent.router_info)
self.assertFalse(self._namespace_exists(
namespaces.NS_PREFIX + routers_to_delete[i]['id']))
def _router_lifecycle(self, enable_ha, ip_version=4,
dual_stack=False, v6_ext_gw_with_sub=True):
router_info = self.generate_router_info(enable_ha, ip_version,
dual_stack=dual_stack,
v6_ext_gw_with_sub=(
v6_ext_gw_with_sub))
router = self.manage_router(self.agent, router_info)
# Add multiple-IPv6-prefix internal router port
slaac = l3_constants.IPV6_SLAAC
slaac_mode = {'ra_mode': slaac, 'address_mode': slaac}
subnet_modes = [slaac_mode] * 2
self._add_internal_interface_by_subnet(router.router, count=2,
ip_version=6, ipv6_subnet_modes=subnet_modes)
router.process(self.agent)
if enable_ha:
port = router.get_ex_gw_port()
interface_name = router.get_external_device_name(port['id'])
self._assert_no_ip_addresses_on_interface(router.ns_name,
interface_name)
utils.wait_until_true(lambda: router.ha_state == 'master')
# Keepalived notifies of a state transition when it starts,
# not when it ends. Thus, we have to wait until keepalived finishes
# configuring everything. We verify this by waiting until the last
# device has an IP address.
device = router.router[l3_constants.INTERFACE_KEY][-1]
device_exists = functools.partial(
self.device_exists_with_ips_and_mac,
device,
router.get_internal_device_name,
router.ns_name)
utils.wait_until_true(device_exists)
self.assertTrue(self._namespace_exists(router.ns_name))
utils.wait_until_true(
lambda: self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_external_device(router)
if not (enable_ha and (ip_version == 6 or dual_stack)):
# Note(SridharG): enable the assert_gateway for IPv6 once
# keepalived on Ubuntu14.04 (i.e., check-neutron-dsvm-functional
# platform) is updated to 1.2.10 (or above).
# For more details: https://review.openstack.org/#/c/151284/
self._assert_gateway(router, v6_ext_gw_with_sub)
self.assertTrue(self.floating_ips_configured(router))
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_extra_routes(router)
self._assert_metadata_chains(router)
if enable_ha:
self._assert_ha_device(router)
self.assertTrue(router.keepalived_manager.get_process().active)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
if enable_ha:
self.assertFalse(router.keepalived_manager.get_process().active)
def _assert_external_device(self, router):
external_port = router.get_ex_gw_port()
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
router.ns_name))
def _assert_gateway(self, router, v6_ext_gw_with_sub=True):
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=router.ns_name)
for subnet in external_port['subnets']:
self._gateway_check(subnet['gateway_ip'], external_device)
if not v6_ext_gw_with_sub:
self._gateway_check(self.agent.conf.ipv6_gateway,
external_device)
def _gateway_check(self, gateway_ip, external_device):
expected_gateway = gateway_ip
ip_vers = netaddr.IPAddress(expected_gateway).version
existing_gateway = (external_device.route.get_gateway(
ip_version=ip_vers).get('gateway'))
self.assertEqual(expected_gateway, existing_gateway)
def _assert_ha_device(self, router):
def ha_router_dev_name_getter(not_used):
return router.get_ha_device_name()
self.assertTrue(self.device_exists_with_ips_and_mac(
router.router[l3_constants.HA_INTERFACE_KEY],
ha_router_dev_name_getter, router.ns_name))
@classmethod
def _get_addresses_on_device(cls, namespace, interface):
return [address['cidr'] for address in
ip_lib.IPDevice(interface, namespace=namespace).addr.list()]
def _assert_no_ip_addresses_on_interface(self, namespace, interface):
self.assertEqual(
[], self._get_addresses_on_device(namespace, interface))
def test_ha_router_conf_on_restarted_agent(self):
router_info = self.generate_router_info(enable_ha=True)
router1 = self.manage_router(self.agent, router_info)
self._add_fip(router1, '192.168.111.12')
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
self._create_router(restarted_agent, router1.router)
utils.wait_until_true(lambda: self.floating_ips_configured(router1))
self.assertIn(
router1._get_primary_vip(),
self._get_addresses_on_device(
router1.ns_name,
router1.get_ha_device_name()))
def test_fip_connection_from_same_subnet(self):
'''Test connection to floatingip which is associated with
fixed_ip on the same subnet of the source fixed_ip.
In other words it confirms that return packets surely
go through the router.
'''
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
router_ip = router_ip_cidr.partition('/')[0]
src_ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
dst_ip_cidr = net_helpers.increment_ip_cidr(src_ip_cidr)
dst_ip = dst_ip_cidr.partition('/')[0]
dst_fip = '19.4.4.10'
router.router[l3_constants.FLOATINGIP_KEY] = []
self._add_fip(router, dst_fip, fixed_address=dst_ip)
router.process(self.agent)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by fake machines
src_ns = self._create_namespace(prefix='test-src-')
src_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, src_ns.namespace)).port
src_port.addr.add(src_ip_cidr)
net_helpers.set_namespace_gateway(src_port, router_ip)
dst_ns = self._create_namespace(prefix='test-dst-')
dst_port = self.useFixture(
net_helpers.OVSPortFixture(br_int, dst_ns.namespace)).port
dst_port.addr.add(dst_ip_cidr)
net_helpers.set_namespace_gateway(dst_port, router_ip)
protocol_port = helpers.get_free_namespace_port(dst_ns)
# client sends to fip
netcat = helpers.NetcatTester(src_ns, dst_ns, dst_ip,
protocol_port,
client_address=dst_fip,
run_as_root=True,
udp=False)
self.addCleanup(netcat.stop_processes)
self.assertTrue(netcat.test_connectivity())
class L3HATestFramework(L3AgentTestFramework):
NESTED_NAMESPACE_SEPARATOR = '@'
def setUp(self):
super(L3HATestFramework, self).setUp()
self.failover_agent = self._configure_agent('agent2')
br_int_1 = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
br_int_2 = get_ovs_bridge(
self.failover_agent.conf.ovs_integration_bridge)
veth1, veth2 = self.create_veth()
br_int_1.add_port(veth1.name)
br_int_2.add_port(veth2.name)
def test_ha_router_failover(self):
router_info = self.generate_router_info(enable_ha=True)
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router1 = self.manage_router(self.agent, router_info)
router_info_2 = copy.deepcopy(router_info)
router_info_2[l3_constants.HA_INTERFACE_KEY] = (
test_l3_agent.get_ha_interface(ip='169.254.192.2',
mac='22:22:22:22:22:22'))
ns_name = "%s%s%s" % (
namespaces.RouterNamespace._get_ns_name(router_info_2['id']),
self.NESTED_NAMESPACE_SEPARATOR, self.failover_agent.host)
mock.patch.object(namespaces.RouterNamespace, '_get_ns_name',
return_value=ns_name).start()
router2 = self.manage_router(self.failover_agent, router_info_2)
utils.wait_until_true(lambda: router1.ha_state == 'master')
utils.wait_until_true(lambda: router2.ha_state == 'backup')
device_name = router1.get_ha_device_name()
ha_device = ip_lib.IPDevice(device_name, namespace=router1.ns_name)
ha_device.link.set_down()
utils.wait_until_true(lambda: router2.ha_state == 'master')
utils.wait_until_true(lambda: router1.ha_state == 'backup')
class MetadataFakeProxyHandler(object):
def __init__(self, status):
self.status = status
@webob.dec.wsgify()
def __call__(self, req):
return webob.Response(status=self.status)
class MetadataL3AgentTestCase(L3AgentTestFramework):
SOCKET_MODE = 0o644
def _create_metadata_fake_server(self, status):
server = utils.UnixDomainWSGIServer('metadata-fake-server')
self.addCleanup(server.stop)
# NOTE(cbrandily): TempDir fixture creates a folder with 0o700
# permissions but metadata_proxy_socket folder must be readable by all
# users
self.useFixture(
helpers.RecursivePermDirFixture(
os.path.dirname(self.agent.conf.metadata_proxy_socket), 0o555))
server.start(MetadataFakeProxyHandler(status),
self.agent.conf.metadata_proxy_socket,
workers=0, backlog=4096, mode=self.SOCKET_MODE)
def test_access_to_metadata_proxy(self):
"""Test access to the l3-agent metadata proxy.
The test creates:
* A l3-agent metadata service:
* A router (which creates a metadata proxy in the router namespace),
* A fake metadata server
* A "client" namespace (simulating a vm) with a port on router
internal subnet.
The test queries from the "client" namespace the metadata proxy on
http://169.254.169.254 and asserts that the metadata proxy added
the X-Forwarded-For and X-Neutron-Router-Id headers to the request
and forwarded the http request to the fake metadata server and the
response to the "client" namespace.
"""
router_info = self.generate_router_info(enable_ha=False)
router = self.manage_router(self.agent, router_info)
self._create_metadata_fake_server(webob.exc.HTTPOk.code)
# Create and configure client namespace
client_ns = self._create_namespace()
router_ip_cidr = self._port_first_ip_cidr(router.internal_ports[0])
ip_cidr = net_helpers.increment_ip_cidr(router_ip_cidr)
br_int = get_ovs_bridge(self.agent.conf.ovs_integration_bridge)
# FIXME(cbrandily): temporary, will be replaced by a fake machine
port = self.useFixture(
net_helpers.OVSPortFixture(br_int, client_ns.namespace)).port
port.addr.add(ip_cidr)
net_helpers.set_namespace_gateway(port,
router_ip_cidr.partition('/')[0])
# Query metadata proxy
url = 'http://%(host)s:%(port)s' % {'host': dhcp.METADATA_DEFAULT_IP,
'port': dhcp.METADATA_PORT}
cmd = 'curl', '--max-time', METADATA_REQUEST_TIMEOUT, '-D-', url
try:
raw_headers = client_ns.netns.execute(cmd)
except RuntimeError:
self.fail('metadata proxy unreachable on %s before timeout' % url)
# Check status code
firstline = raw_headers.splitlines()[0]
self.assertIn(str(webob.exc.HTTPOk.code), firstline.split())
class UnprivilegedUserMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user.
The least privileged user has uid=65534 and is commonly named 'nobody' but
not always, that's why we use its uid.
"""
<|fim▁hole|> self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class UnprivilegedUserGroupMetadataL3AgentTestCase(MetadataL3AgentTestCase):
"""Test metadata proxy with least privileged user/group.
The least privileged user has uid=65534 and is commonly named 'nobody' but
not always, that's why we use its uid.
Its group has gid=65534 and is commonly named 'nobody' or 'nogroup', that's
why we use its gid.
"""
SOCKET_MODE = 0o666
def setUp(self):
super(UnprivilegedUserGroupMetadataL3AgentTestCase, self).setUp()
self.agent.conf.set_override('metadata_proxy_user', '65534')
self.agent.conf.set_override('metadata_proxy_group', '65534')
self.agent.conf.set_override('metadata_proxy_watch_log', False)
class TestDvrRouter(L3AgentTestFramework):
def test_dvr_router_lifecycle_without_ha_without_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=False)
def test_dvr_router_lifecycle_without_ha_with_snat_with_fips(self):
self._dvr_router_lifecycle(enable_ha=False, enable_snat=True)
def _helper_create_dvr_router_fips_for_ext_network(
self, agent_mode, **dvr_router_kwargs):
self.agent.conf.agent_mode = agent_mode
router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
router = self.manage_router(self.agent, router_info)
fip_ns = router.fip_ns.get_name()
return router, fip_ns
def _validate_fips_for_external_network(self, router, fip_ns):
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._namespace_exists(fip_ns))
self._assert_dvr_floating_ips(router)
self._assert_snat_namespace_does_not_exist(router)
def test_dvr_router_fips_for_multiple_ext_networks(self):
agent_mode = 'dvr'
# Create the first router fip with external net1
dvr_router1_kwargs = {'ip_address': '19.4.4.3',
'subnet_cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.1',
'gateway_mac': 'ca:fe:de:ab:cd:ef'}
router1, fip1_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router1_kwargs))
# Validate the fip with external net1
self._validate_fips_for_external_network(router1, fip1_ns)
# Create the second router fip with external net2
dvr_router2_kwargs = {'ip_address': '19.4.5.3',
'subnet_cidr': '19.4.5.0/24',
'gateway_ip': '19.4.5.1',
'gateway_mac': 'ca:fe:de:ab:cd:fe'}
router2, fip2_ns = (
self._helper_create_dvr_router_fips_for_ext_network(
agent_mode, **dvr_router2_kwargs))
# Validate the fip with external net2
self._validate_fips_for_external_network(router2, fip2_ns)
def _dvr_router_lifecycle(self, enable_ha=False, enable_snat=False,
custom_mtu=2000):
'''Test dvr router lifecycle
:param enable_ha: sets the ha value for the router.
:param enable_snat: the value of enable_snat is used
to set the agent_mode.
'''
# The value of agent_mode can be dvr, dvr_snat, or legacy.
# Since by definition this is a dvr (distributed = true)
# only dvr and dvr_snat are applicable
self.agent.conf.agent_mode = 'dvr_snat' if enable_snat else 'dvr'
self.agent.conf.network_device_mtu = custom_mtu
# We get the router info particular to a dvr router
router_info = self.generate_dvr_router_info(
enable_ha, enable_snat)
# We need to mock the get_agent_gateway_port return value
# because the whole L3PluginApi is mocked and we need the port
# gateway_port information before the l3_agent will create it.
# The port returned needs to have the same information as
# router_info['gw_port']
mocked_gw_port = (
neutron_l3_agent.L3PluginApi.return_value.get_agent_gateway_port)
mocked_gw_port.return_value = router_info['gw_port']
# We also need to mock the get_external_network_id method to
# get the correct fip namespace.
mocked_ext_net_id = (
neutron_l3_agent.L3PluginApi.return_value.get_external_network_id)
mocked_ext_net_id.return_value = (
router_info['_floatingips'][0]['floating_network_id'])
# With all that set we can now ask the l3_agent to
# manage the router (create it, create namespaces,
# attach interfaces, etc...)
router = self.manage_router(self.agent, router_info)
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._metadata_proxy_exists(self.agent.conf, router))
self._assert_internal_devices(router)
self._assert_dvr_external_device(router)
self._assert_dvr_gateway(router)
self._assert_dvr_floating_ips(router)
self._assert_snat_chains(router)
self._assert_floating_ip_chains(router)
self._assert_metadata_chains(router)
self._assert_extra_routes(router)
self._assert_rfp_fpr_mtu(router, custom_mtu)
self._delete_router(self.agent, router.router_id)
self._assert_interfaces_deleted_from_ovs()
self._assert_router_does_not_exist(router)
def generate_dvr_router_info(
self, enable_ha=False, enable_snat=False, **kwargs):
router = test_l3_agent.prepare_router_data(
enable_snat=enable_snat,
enable_floating_ip=True,
enable_ha=enable_ha,
**kwargs)
internal_ports = router.get(l3_constants.INTERFACE_KEY, [])
router['distributed'] = True
router['gw_port_host'] = self.agent.conf.host
router['gw_port']['binding:host_id'] = self.agent.conf.host
floating_ip = router['_floatingips'][0]
floating_ip['floating_network_id'] = router['gw_port']['network_id']
floating_ip['host'] = self.agent.conf.host
floating_ip['port_id'] = internal_ports[0]['id']
floating_ip['status'] = 'ACTIVE'
self._add_snat_port_info_to_router(router, internal_ports)
# FIP has a dependency on external gateway. So we need to create
# the snat_port info and fip_agent_gw_port_info irrespective of
# the agent type the dvr supports. The namespace creation is
# dependent on the agent_type.
external_gw_port = router['gw_port']
self._add_fip_agent_gw_port_info_to_router(router, external_gw_port)
return router
def _add_fip_agent_gw_port_info_to_router(self, router, external_gw_port):
# Add fip agent gateway port information to the router_info
fip_gw_port_list = router.get(
l3_constants.FLOATINGIP_AGENT_INTF_KEY, [])
if not fip_gw_port_list and external_gw_port:
# Get values from external gateway port
fixed_ip = external_gw_port['fixed_ips'][0]
float_subnet = external_gw_port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add floatingip agent gateway port info to router
prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
router[l3_constants.FLOATINGIP_AGENT_INTF_KEY] = [
{'subnets': [
{'cidr': float_subnet['cidr'],
'gateway_ip': float_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': external_gw_port['network_id'],
'device_owner': 'network:floatingip_agent_gateway',
'mac_address': 'fa:16:3e:80:8d:89',
'binding:host_id': self.agent.conf.host,
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': fip_gw_port_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _add_snat_port_info_to_router(self, router, internal_ports):
# Add snat port information to the router
snat_port_list = router.get(l3_constants.SNAT_ROUTER_INTF_KEY, [])
if not snat_port_list and internal_ports:
# Get values from internal port
port = internal_ports[0]
fixed_ip = port['fixed_ips'][0]
snat_subnet = port['subnets'][0]
port_ip = fixed_ip['ip_address']
# Pick an ip address which is not the same as port_ip
snat_ip = str(netaddr.IPAddress(port_ip) + 5)
# Add the info to router as the first snat port
# in the list of snat ports
prefixlen = netaddr.IPNetwork(snat_subnet['cidr']).prefixlen
router[l3_constants.SNAT_ROUTER_INTF_KEY] = [
{'subnets': [
{'cidr': snat_subnet['cidr'],
'gateway_ip': snat_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': port['network_id'],
'device_owner': 'network:router_centralized_snat',
'mac_address': 'fa:16:3e:80:8d:89',
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': snat_ip,
'prefixlen': prefixlen}],
'id': _uuid(),
'device_id': _uuid()}
]
def _assert_dvr_external_device(self, router):
external_port = router.get_ex_gw_port()
snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
# if the agent is in dvr_snat mode, then we have to check
# that the correct ports and ip addresses exist in the
# snat_ns_name namespace
if self.agent.conf.agent_mode == 'dvr_snat':
self.assertTrue(self.device_exists_with_ips_and_mac(
external_port, router.get_external_device_name,
snat_ns_name))
# if the agent is in dvr mode then the snat_ns_name namespace
# should not be present at all:
elif self.agent.conf.agent_mode == 'dvr':
self.assertFalse(
self._namespace_exists(snat_ns_name),
"namespace %s was found but agent is in dvr mode not dvr_snat"
% (str(snat_ns_name))
)
# if the agent is anything else the test is misconfigured
# we force a test failure with message
else:
self.assertTrue(False, " agent not configured for dvr or dvr_snat")
def _assert_dvr_gateway(self, router):
gateway_expected_in_snat_namespace = (
self.agent.conf.agent_mode == 'dvr_snat'
)
if gateway_expected_in_snat_namespace:
self._assert_dvr_snat_gateway(router)
snat_namespace_should_not_exist = (
self.agent.conf.agent_mode == 'dvr'
)
if snat_namespace_should_not_exist:
self._assert_snat_namespace_does_not_exist(router)
def _assert_dvr_snat_gateway(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=namespace)
existing_gateway = (
external_device.route.get_gateway().get('gateway'))
expected_gateway = external_port['subnets'][0]['gateway_ip']
self.assertEqual(expected_gateway, existing_gateway)
def _assert_snat_namespace_does_not_exist(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
self.assertFalse(self._namespace_exists(namespace))
def _assert_dvr_floating_ips(self, router):
# in the fip namespace:
# Check that the fg-<port-id> (floatingip_agent_gateway)
# is created with the ip address of the external gateway port
floating_ips = router.router[l3_constants.FLOATINGIP_KEY]
self.assertTrue(floating_ips)
# We need to fetch the floatingip agent gateway port info
# from the router_info
floating_agent_gw_port = (
router.router[l3_constants.FLOATINGIP_AGENT_INTF_KEY])
self.assertTrue(floating_agent_gw_port)
external_gw_port = floating_agent_gw_port[0]
fip_ns = self.agent.get_fip_ns(floating_ips[0]['floating_network_id'])
fip_ns_name = fip_ns.get_name()
fg_port_created_successfully = ip_lib.device_exists_with_ips_and_mac(
fip_ns.get_ext_device_name(external_gw_port['id']),
[self._port_first_ip_cidr(external_gw_port)],
external_gw_port['mac_address'],
namespace=fip_ns_name)
self.assertTrue(fg_port_created_successfully)
# Check fpr-router device has been created
device_name = fip_ns.get_int_device_name(router.router_id)
fpr_router_device_created_successfully = ip_lib.device_exists(
device_name, namespace=fip_ns_name)
self.assertTrue(fpr_router_device_created_successfully)
# In the router namespace
# Check rfp-<router-id> is created correctly
for fip in floating_ips:
device_name = fip_ns.get_rtr_ext_device_name(router.router_id)
self.assertTrue(ip_lib.device_exists(
device_name, namespace=router.ns_name))
def test_dvr_router_rem_fips_on_restarted_agent(self):
self.agent.conf.agent_mode = 'dvr_snat'
router_info = self.generate_dvr_router_info()
router1 = self._create_router(self.agent, router_info)
self._add_fip(router1, '192.168.111.12', self.agent.conf.host)
fip_ns = router1.fip_ns.get_name()
restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
self.agent.host, self.agent.conf)
router1.router[l3_constants.FLOATINGIP_KEY] = []
self._create_router(restarted_agent, router1.router)
self._assert_dvr_snat_gateway(router1)
self.assertFalse(self._namespace_exists(fip_ns))
def test_dvr_router_add_internal_network_set_arp_cache(self):
# Check that, when the router is set up and there are
# existing ports on the the uplinked subnet, the ARP
# cache is properly populated.
self.agent.conf.agent_mode = 'dvr_snat'
router_info = test_l3_agent.prepare_router_data()
router_info['distributed'] = True
expected_neighbor = '35.4.1.10'
port_data = {
'fixed_ips': [{'ip_address': expected_neighbor}],
'mac_address': 'fa:3e:aa:bb:cc:dd',
'device_owner': 'compute:None'
}
self.agent.plugin_rpc.get_ports_by_subnet.return_value = [port_data]
router1 = self._create_router(self.agent, router_info)
internal_device = router1.get_internal_device_name(
router_info['_interfaces'][0]['id'])
neighbors = ip_lib.IPDevice(internal_device, router1.ns_name).neigh
self.assertEqual(expected_neighbor, neighbors.show().split()[0])
def _assert_rfp_fpr_mtu(self, router, expected_mtu=1500):
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_rtr_ext_device_name,
router.ns_name)
self.assertEqual(expected_mtu, dev_mtu)
dev_mtu = self.get_device_mtu(
router.router_id, router.fip_ns.get_int_device_name,
router.fip_ns.get_name())
self.assertEqual(expected_mtu, dev_mtu)<|fim▁end|> | SOCKET_MODE = 0o664
def setUp(self):
super(UnprivilegedUserMetadataL3AgentTestCase, self).setUp() |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# flake8: noqa
from setuptools import find_packages, setup
setup(
name = "acos-client",
version = "1.4.6",
packages = find_packages(),
author = "A10 Networks",
author_email = "[email protected]",
description = "A10 Networks ACOS API Client",
license = "Apache",
keywords = "a10 axapi acos adc slb load balancer",
url = "https://github.com/a10networks/acos-client",
long_description = open('README.md').read(),
classifiers = [
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
"Programming Language :: Python :: Implementation :: CPython",<|fim▁hole|>
install_requires = ['requests>=2.3.0', 'six', 'uhashring'],
test_suite="acos_client.tests.test_suite"
)<|fim▁end|> | "Programming Language :: Python :: Implementation :: PyPy",
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules'
], |
<|file_name|>main.js<|end_file_name|><|fim▁begin|>// Fill in topbar details.
$('header .right').append(Handlebars.templates.userTopbar(user));
// Fill in sidebar details.
user.created_at = moment(user.created_at).format('MMM DD, YYYY');
var userStars = {user: user, stars: stars};
$('aside').prepend(Handlebars.templates.userSidebar(userStars));
// Populate the organizations list on the sidebar.
var orgList = $('aside #organizations');
orgs.forEach(function (org) {
orgList.append(Handlebars.templates.org(org));
});
// Populate the repos list in the main page.
var repoList = $('section.main .repo-list');
repos = _.sortBy( repos, 'updated_at' ).reverse();
repos.forEach(function (repo) {
repo.updated_at = moment(repo.updated_at).fromNow();
repoList.append(Handlebars.templates.repo(repo));<|fim▁hole|><|fim▁end|> | }); |
<|file_name|>TBinaryProtocol.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from .TProtocol import TType, TProtocolBase, TProtocolException
from struct import pack, unpack
class TBinaryProtocol(TProtocolBase):
"""Binary implementation of the Thrift protocol driver."""
# NastyHaxx. Python 2.4+ on 32-bit machines forces hex constants to be
# positive, converting this into a long. If we hardcode the int value
# instead it'll stay in 32 bit-land.
# VERSION_MASK = 0xffff0000
VERSION_MASK = -65536
# VERSION_1 = 0x80010000
VERSION_1 = -2147418112
TYPE_MASK = 0x000000ff
def __init__(self, trans, strictRead=False, strictWrite=True, **kwargs):
TProtocolBase.__init__(self, trans)
self.strictRead = strictRead
self.strictWrite = strictWrite
self.string_length_limit = kwargs.get('string_length_limit', None)
self.container_length_limit = kwargs.get('container_length_limit', None)
def _check_string_length(self, length):
self._check_length(self.string_length_limit, length)
def _check_container_length(self, length):
self._check_length(self.container_length_limit, length)
def writeMessageBegin(self, name, type, seqid):
    """Emit a message header in strict (versioned) or legacy form.

    Strict form writes a 4-byte version word carrying the message type,
    then the name; legacy form writes the name, then a 1-byte type.
    Both forms end with the 4-byte sequence id.
    """
    if self.strictWrite:
        self.writeI32(TBinaryProtocol.VERSION_1 | type)
        self.writeString(name)
    else:
        self.writeString(name)
        self.writeByte(type)
    self.writeI32(seqid)
def writeMessageEnd(self):
pass
def writeStructBegin(self, name):
pass
def writeStructEnd(self):
pass
def writeFieldBegin(self, name, type, id):
self.writeByte(type)
self.writeI16(id)
def writeFieldEnd(self):
pass
def writeFieldStop(self):
self.writeByte(TType.STOP)
def writeMapBegin(self, ktype, vtype, size):
self.writeByte(ktype)
self.writeByte(vtype)
self.writeI32(size)
def writeMapEnd(self):
pass
def writeListBegin(self, etype, size):
self.writeByte(etype)
self.writeI32(size)
def writeListEnd(self):
pass
def writeSetBegin(self, etype, size):
self.writeByte(etype)
self.writeI32(size)
def writeSetEnd(self):
pass
def writeBool(self, bool):
    """Encode a boolean as a single byte: 1 for true, 0 for false."""
    # Parameter is named `bool` in the upstream API; keep it for
    # keyword-call compatibility even though it shadows the builtin.
    self.writeByte(1 if bool else 0)
def writeByte(self, byte):
buff = pack("!b", byte)
self.trans.write(buff)
def writeI16(self, i16):
buff = pack("!h", i16)
self.trans.write(buff)
def writeI32(self, i32):
buff = pack("!i", i32)
self.trans.write(buff)
def writeI64(self, i64):
buff = pack("!q", i64)
self.trans.write(buff)
def writeDouble(self, dub):
buff = pack("!d", dub)
self.trans.write(buff)
def writeBinary(self, str):
self.writeI32(len(str))
self.trans.write(str)
def readMessageBegin(self):
    """Read a message header and return the tuple (name, type, seqid).

    A negative leading i32 signals the strict, versioned header; a
    non-negative value is the legacy size-prefixed form, which is
    rejected when strictRead is enabled.
    """
    sz = self.readI32()
    if sz >= 0:
        # Legacy header: sz is the length of the method name.
        if self.strictRead:
            raise TProtocolException(type=TProtocolException.BAD_VERSION,
                                     message='No protocol version header')
        name = self.trans.readAll(sz)
        type = self.readByte()
    else:
        # Versioned header: the high bits must match VERSION_1.
        version = sz & TBinaryProtocol.VERSION_MASK
        if version != TBinaryProtocol.VERSION_1:
            raise TProtocolException(
                type=TProtocolException.BAD_VERSION,
                message='Bad version in readMessageBegin: %d' % (sz))
        type = sz & TBinaryProtocol.TYPE_MASK
        name = self.readString()
    seqid = self.readI32()
    return (name, type, seqid)
def readMessageEnd(self):
pass
def readStructBegin(self):
pass
def readStructEnd(self):
pass
def readFieldBegin(self):
type = self.readByte()
if type == TType.STOP:
return (None, type, 0)
id = self.readI16()
return (None, type, id)
def readFieldEnd(self):
pass
def readMapBegin(self):<|fim▁hole|> size = self.readI32()
self._check_container_length(size)
return (ktype, vtype, size)
def readMapEnd(self):
pass
def readListBegin(self):
etype = self.readByte()
size = self.readI32()
self._check_container_length(size)
return (etype, size)
def readListEnd(self):
pass
def readSetBegin(self):
etype = self.readByte()
size = self.readI32()
self._check_container_length(size)
return (etype, size)
def readSetEnd(self):
pass
def readBool(self):
    """Decode a boolean: any nonzero byte reads as True."""
    return self.readByte() != 0
def readByte(self):
buff = self.trans.readAll(1)
val, = unpack('!b', buff)
return val
def readI16(self):
buff = self.trans.readAll(2)
val, = unpack('!h', buff)
return val
def readI32(self):
buff = self.trans.readAll(4)
val, = unpack('!i', buff)
return val
def readI64(self):
buff = self.trans.readAll(8)
val, = unpack('!q', buff)
return val
def readDouble(self):
buff = self.trans.readAll(8)
val, = unpack('!d', buff)
return val
def readBinary(self):
size = self.readI32()
self._check_string_length(size)
s = self.trans.readAll(size)
return s
class TBinaryProtocolFactory(object):
    """Factory producing TBinaryProtocol instances that share one
    configuration (strictness flags and optional length limits)."""

    def __init__(self, strictRead=False, strictWrite=True, **kwargs):
        self.strictRead = strictRead
        self.strictWrite = strictWrite
        # Length limits default to None (unlimited) when not supplied.
        self.string_length_limit = kwargs.get('string_length_limit')
        self.container_length_limit = kwargs.get('container_length_limit')

    def getProtocol(self, trans):
        """Build a TBinaryProtocol bound to the given transport."""
        return TBinaryProtocol(trans, self.strictRead, self.strictWrite,
                               string_length_limit=self.string_length_limit,
                               container_length_limit=self.container_length_limit)
class TBinaryProtocolAccelerated(TBinaryProtocol):
"""C-Accelerated version of TBinaryProtocol.
This class does not override any of TBinaryProtocol's methods,
but the generated code recognizes it directly and will call into
our C module to do the encoding, bypassing this object entirely.
We inherit from TBinaryProtocol so that the normal TBinaryProtocol
encoding can happen if the fastbinary module doesn't work for some
reason. (TODO(dreiss): Make this happen sanely in more cases.)
In order to take advantage of the C module, just use
TBinaryProtocolAccelerated instead of TBinaryProtocol.
NOTE: This code was contributed by an external developer.
The internal Thrift team has reviewed and tested it,
but we cannot guarantee that it is production-ready.
Please feel free to report bugs and/or success stories
to the public mailing list.
"""
pass
class TBinaryProtocolAcceleratedFactory(object):
    """Factory producing TBinaryProtocolAccelerated instances with
    optional string/container length limits."""

    def __init__(self,
                 string_length_limit=None,
                 container_length_limit=None):
        self.string_length_limit = string_length_limit
        self.container_length_limit = container_length_limit

    def getProtocol(self, trans):
        """Build an accelerated protocol bound to the given transport."""
        return TBinaryProtocolAccelerated(
            trans,
            string_length_limit=self.string_length_limit,
            container_length_limit=self.container_length_limit)
vtype = self.readByte() |
<|file_name|>IdentifiersMappingViewTest.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright 2008(c) The OBiBa Consortium. All rights reserved.
*
* This program and the accompanying materials
* are made available under the terms of the GNU Public License v3.0.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package org.obiba.opal.core.magma;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.obiba.magma.Datasource;
import org.obiba.magma.MagmaEngine;
import org.obiba.magma.NoSuchValueSetException;
import org.obiba.magma.ValueSet;
import org.obiba.magma.VariableEntity;
import org.obiba.magma.support.StaticValueTable;
import org.obiba.magma.support.VariableEntityBean;
import org.obiba.magma.type.IntegerType;
import org.obiba.magma.type.TextType;
import org.obiba.opal.core.magma.IdentifiersMappingView.Policy;
import com.google.common.collect.ImmutableSet;
import static org.fest.assertions.api.Assertions.assertThat;
/**
*
*/
public class IdentifiersMappingViewTest {
private StaticValueTable opalDataTable;
private StaticValueTable unitDataTable;
private StaticValueTable keysTable;
  /**
   * Builds the three in-memory tables used by every test: an Opal data
   * table keyed by public ids, a unit data table keyed by private ids,
   * and a keys table mapping each public id to its private id.
   */
  @Before
  public void setupDataAndKeysTable() {
    MagmaEngine.get();
    // Create the following table (public ids):
    // id,Var1,Var2
    // 1,1,1
    // 2,2,2
    // 3,3,3
    // 4,4,4
    opalDataTable = new StaticValueTable(EasyMock.createMock(Datasource.class), "opal-table",
        ImmutableSet.of("1", "2", "3", "4"));
    opalDataTable.addVariables(IntegerType.get(), "Var1", "Var2");
    for(int i = 1; i < 5; i++) {
      opalDataTable.addValues("" + i, "Var1", i, "Var2", i);
    }
    // Create the following table (private ids, one row each):
    // id,Var1,Var2
    // private-1,1,1
    // private-2,2,2
    // private-3,3,3
    // private-4,4,4
    unitDataTable = new StaticValueTable(EasyMock.createMock(Datasource.class), "unit-table",
        ImmutableSet.of("private-1", "private-2", "private-3", "private-4"));
    unitDataTable.addVariables(IntegerType.get(), "Var1", "Var2");
    for(int i = 1; i < 5; i++) {
      unitDataTable.addValues("private-" + i, "Var1", i, "Var2", i);
    }
    // Create the keys table linking public ids to private ids:
    // id,keys-variable
    // 1,private-1
    // 2,private-2
    // 3,private-3
    // 4,private-4
    keysTable = new StaticValueTable(EasyMock.createMock(Datasource.class), "keys-table",
        ImmutableSet.of("1", "2", "3", "4"));
    keysTable.addVariables(TextType.get(), "keys-variable");
    for(int i = 1; i < 5; i++) {
      keysTable.addValues("" + i, "keys-variable", "private-" + i);
    }
  }
@After
public void stopYourEngine() {
MagmaEngine.get().shutdown();
}
@Test
public void test_getVariableEntities_returnsPrivateIdentifiers() {
IdentifiersMappingView fuv = createViewOnOpalDataTable();
for(VariableEntity entity : fuv.getVariableEntities()) {
assertThat(entity.getIdentifier().contains("private")).isTrue();
}
}
@Test
public void test_getVariableEntities_returnsPublicIdentifiers() {
IdentifiersMappingView fuv = createViewOnUnitDataTable();
for(VariableEntity entity : fuv.getVariableEntities()) {
assertThat(entity.getIdentifier().contains("private")).isFalse();
}
}
@Test
public void test_hasValueSet_returnsTrueForPrivateIdentifier() {
IdentifiersMappingView fuv = createViewOnOpalDataTable();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-1"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-2"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-3"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-4"))).isTrue();
}
@Test
public void test_hasValueSet_returnsFalseForPrivateIdentifier() {
// Make unit identifiers private
IdentifiersMappingView fuv = createViewOnUnitDataTable();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-1"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-2"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-3"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "private-4"))).isFalse();
}
@Test
public void test_hasValueSet_returnsFalseForPublicIdentifier() {
IdentifiersMappingView fuv = createViewOnOpalDataTable();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "1"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "2"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "3"))).isFalse();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "4"))).isFalse();
}
@Test
public void test_hasValueSet_returnsTrueForPublicIdentifier() {
// Make unit identifiers private
IdentifiersMappingView fuv = createViewOnUnitDataTable();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "1"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "2"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "3"))).isTrue();
assertThat(fuv.hasValueSet(new VariableEntityBean("Participant", "4"))).isTrue();
}
@Test
public void test_getValueSet_returnsValueSetForPrivateIdentifier() {
IdentifiersMappingView fuv = createViewOnOpalDataTable();
for(int i = 1; i < 5; i++) {
ValueSet vs = fuv.getValueSet(new VariableEntityBean("Participant", "private-" + i));
assertThat(vs.getValueTable()).isEqualTo(fuv);
assertThat(vs.getVariableEntity().getIdentifier()).isEqualTo("private-" + i);
}
}
@Test
public void test_getValueSet_returnsValueSetForPublicIdentifier() {
// Make unit identifiers private
IdentifiersMappingView fuv = createViewOnUnitDataTable();
for(int i = 1; i < 5; i++) {
ValueSet vs = fuv.getValueSet(new VariableEntityBean("Participant", "" + i));
assertThat(vs.getValueTable()).isEqualTo(fuv);
assertThat(vs.getVariableEntity().getIdentifier()).isEqualTo("" + i);
}
}
@Test
public void test_getValueSet_throwsNoSuchValueSetForPublicIdentifier() {<|fim▁hole|> IdentifiersMappingView fuv = createViewOnOpalDataTable();
for(int i = 1; i < 5; i++) {
try {
fuv.getValueSet(new VariableEntityBean("", "" + i));
// Must not reach this point
assertThat(true).isFalse();
} catch(NoSuchValueSetException e) {
// should reach this point
}
}
}
@Test
public void test_getValueSet_throwsNoSuchValueSetForPrivateIdentifier() {
// Make unit identifiers private
IdentifiersMappingView fuv = createViewOnUnitDataTable();
for(int i = 1; i < 5; i++) {
try {
fuv.getValueSet(new VariableEntityBean("", "private-" + i));
// Must not reach this point
assertThat(true).isFalse();
} catch(NoSuchValueSetException e) {
// should reach this point
}
}
}
  /** View over opalDataTable; unit identifiers are treated as public. */
  private IdentifiersMappingView createViewOnOpalDataTable() {
    return new IdentifiersMappingView("keys-variable", Policy.UNIT_IDENTIFIERS_ARE_PUBLIC, opalDataTable, keysTable);
  }

  /** View over unitDataTable; unit identifiers are treated as private. */
  private IdentifiersMappingView createViewOnUnitDataTable() {
    return new IdentifiersMappingView("keys-variable", Policy.UNIT_IDENTIFIERS_ARE_PRIVATE, unitDataTable, keysTable);
  }
}<|fim▁end|> | |
<|file_name|>listen_addr.go<|end_file_name|><|fim▁begin|>package cluster // import "github.com/docker/docker/daemon/cluster"
import (
"fmt"
"net"
"strings"
)
const (
errNoSuchInterface configError = "no such interface"
errNoIP configError = "could not find the system's IP address"
errMustSpecifyListenAddr configError = "must specify a listening address because the address to advertise is not recognized as a system address, and a system's IP address to use could not be uniquely identified"
errBadNetworkIdentifier configError = "must specify a valid IP address or interface name"
errBadListenAddr configError = "listen address must be an IP address or network interface (with optional port number)"
errBadAdvertiseAddr configError = "advertise address must be a non-zero IP address or network interface (with optional port number)"
errBadDataPathAddr configError = "data path address must be a non-zero IP address or network interface (without a port number)"
errBadDefaultAdvertiseAddr configError = "default advertise address must be a non-zero IP address or network interface (without a port number)"
)
// resolveListenAddr splits a listen address of the form host:port and
// resolves the host part, which may be an interface name or an IP
// literal. The unspecified address (e.g. 0.0.0.0) is acceptable for
// listening. It returns the resolved IP and the port as strings.
func resolveListenAddr(specifiedAddr string) (string, string, error) {
	host, port, err := net.SplitHostPort(specifiedAddr)
	if err != nil {
		return "", "", fmt.Errorf("could not parse listen address %s", specifiedAddr)
	}
	// The host component may name a system interface; if so the
	// interface's address is used in its place.
	ip, err := resolveInputIPAddr(host, true)
	if err != nil {
		if err == errBadNetworkIdentifier {
			err = errBadListenAddr
		}
		return "", "", err
	}
	return ip.String(), port, nil
}
// resolveAdvertiseAddr determines the address and port to advertise to
// other cluster members. Resolution order:
//  1. the explicit advertiseAddr argument (interface name or IP, with an
//     optional port; listenAddrPort supplies a missing port)
//  2. the configured DefaultAdvertiseAddr, combined with listenAddrPort
//  3. an autodetected system address, combined with listenAddrPort
// The unspecified address (0.0.0.0) is never acceptable here.
func (c *Cluster) resolveAdvertiseAddr(advertiseAddr, listenAddrPort string) (string, string, error) {
	// Approach:
	// - If an advertise address is specified, use that. Resolve the
	//   interface's address if an interface was specified in
	//   advertiseAddr. Fill in the port from listenAddrPort if necessary.
	// - If DefaultAdvertiseAddr is not empty, use that with the port from
	//   listenAddrPort. Resolve the interface's address from
	//   if an interface name was specified in DefaultAdvertiseAddr.
	// - Otherwise, try to autodetect the system's address. Use the port in
	//   listenAddrPort with this address if autodetection succeeds.
	if advertiseAddr != "" {
		advertiseHost, advertisePort, err := net.SplitHostPort(advertiseAddr)
		if err != nil {
			// Not a host:port specification; treat the whole string as the
			// host and reuse the listen port.
			advertiseHost = advertiseAddr
			advertisePort = listenAddrPort
		}
		// Does the host component match any of the interface names on the
		// system? If so, use the address from that interface.
		advertiseIP, err := resolveInputIPAddr(advertiseHost, false)
		if err != nil {
			if err == errBadNetworkIdentifier {
				err = errBadAdvertiseAddr
			}
			return "", "", err
		}
		return advertiseIP.String(), advertisePort, nil
	}
	if c.config.DefaultAdvertiseAddr != "" {
		// Does the default advertise address component match any of the
		// interface names on the system? If so, use the address from
		// that interface.
		defaultAdvertiseIP, err := resolveInputIPAddr(c.config.DefaultAdvertiseAddr, false)
		if err != nil {
			if err == errBadNetworkIdentifier {
				err = errBadDefaultAdvertiseAddr
			}
			return "", "", err
		}
		return defaultAdvertiseIP.String(), listenAddrPort, nil
	}
	// Fall back to autodetecting the system's address.
	systemAddr, err := c.resolveSystemAddr()
	if err != nil {
		return "", "", err
	}
	return systemAddr.String(), listenAddrPort, nil
}
// validateDefaultAddrPool validates default address pool
// it also strips white space from the string before validation
func validateDefaultAddrPool(defaultAddrPool []string, size uint32) error {
if defaultAddrPool == nil {
// defaultAddrPool is not defined
return nil
}
//if size is not set, then we use default value 24
if size == 0 {
size = 24
}
// We allow max value as 29. We can have 8 IP addresses for max value 29
// If we allow 30, then we will get only 4 IP addresses. But with latest
// libnetwork LB scale implementation, we use total of 4 IP addresses for internal use.
// Hence keeping 29 as max value, we will have 8 IP addresses. This will be
// smallest subnet that can be used in overlay network.
if size > 29 {
return fmt.Errorf("subnet size is out of range: %d", size)
}
for i := range defaultAddrPool {
// trim leading and trailing white spaces
defaultAddrPool[i] = strings.TrimSpace(defaultAddrPool[i])
_, b, err := net.ParseCIDR(defaultAddrPool[i])
if err != nil {
return fmt.Errorf("invalid base pool %s: %v", defaultAddrPool[i], err)
}
ones, _ := b.Mask.Size()
if size < uint32(ones) {
return fmt.Errorf("invalid CIDR: %q. Subnet size is too small for pool: %d", defaultAddrPool[i], size)
}
}
return nil<|fim▁hole|>// if no port is set, the default (4789) is returned
// getDataPathPort validates the VXLAN UDP (data path) port number.
// A zero value selects the default port 4789. Otherwise the port must
// fall inside the IANA "Registered Ports" (User Ports) range:
//   - Well Known / System Ports: 0-1023 (rejected)
//   - Registered / User Ports:   1024-49151 (accepted)
//   - Dynamic / Private Ports:   49152-65535 (rejected)
func getDataPathPort(portNum uint32) (uint32, error) {
	if portNum == 0 {
		// Unset: fall back to the standard VXLAN port.
		return 4789, nil
	}
	if portNum < 1024 || portNum > 49151 {
		return 0, fmt.Errorf("Datapath port number is not in valid range (1024-49151) : %d", portNum)
	}
	return portNum, nil
}
// resolveDataPathAddr resolves the optional data path address, which may
// be an interface name or an IP literal. An empty input means no
// dedicated data path address was configured and yields an empty result.
func resolveDataPathAddr(dataPathAddr string) (string, error) {
	if dataPathAddr == "" {
		// Nothing configured; callers treat "" as "use the default".
		return "", nil
	}
	// The unspecified address (0.0.0.0) is not acceptable here.
	ip, err := resolveInputIPAddr(dataPathAddr, false)
	if err != nil {
		if err == errBadNetworkIdentifier {
			err = errBadDataPathAddr
		}
		return "", err
	}
	return ip.String(), nil
}
// resolveInterfaceAddr returns the IP address of the named network
// interface. It fails with errNoSuchInterface when no interface by that
// name exists, and with a configuration error when the interface carries
// more than one IPv4 address, more than one IPv6 address, or no usable
// address at all. With exactly one IPv4 and one IPv6 address, IPv4 wins.
func resolveInterfaceAddr(specifiedInterface string) (net.IP, error) {
	// Use a specific interface's IP address.
	intf, err := net.InterfaceByName(specifiedInterface)
	if err != nil {
		return nil, errNoSuchInterface
	}
	addrs, err := intf.Addrs()
	if err != nil {
		return nil, err
	}
	var interfaceAddr4, interfaceAddr6 net.IP
	for _, addr := range addrs {
		ipAddr, ok := addr.(*net.IPNet)
		if ok {
			if ipAddr.IP.To4() != nil {
				// IPv4: a second IPv4 address makes the choice ambiguous.
				if interfaceAddr4 != nil {
					return nil, configError(fmt.Sprintf("interface %s has more than one IPv4 address (%s and %s)", specifiedInterface, interfaceAddr4, ipAddr.IP))
				}
				interfaceAddr4 = ipAddr.IP
			} else {
				// IPv6: a second IPv6 address makes the choice ambiguous.
				if interfaceAddr6 != nil {
					return nil, configError(fmt.Sprintf("interface %s has more than one IPv6 address (%s and %s)", specifiedInterface, interfaceAddr6, ipAddr.IP))
				}
				interfaceAddr6 = ipAddr.IP
			}
		}
	}
	if interfaceAddr4 == nil && interfaceAddr6 == nil {
		return nil, configError(fmt.Sprintf("interface %s has no usable IPv4 or IPv6 address", specifiedInterface))
	}
	// In the case that there's exactly one IPv4 address
	// and exactly one IPv6 address, favor IPv4 over IPv6.
	if interfaceAddr4 != nil {
		return interfaceAddr4, nil
	}
	return interfaceAddr6, nil
}
// resolveInputIPAddr resolves the input string to an IP address:
//   - first it is matched against system interface names; on a match the
//     interface's address is returned
//   - otherwise the string is parsed as a literal IP address
// When isUnspecifiedValid is false, unspecified addresses such as
// 0.0.0.0 are rejected with errBadNetworkIdentifier.
func resolveInputIPAddr(input string, isUnspecifiedValid bool) (net.IP, error) {
	addr, err := resolveInterfaceAddr(input)
	if err == nil {
		// The string named an interface; use that interface's address.
		return addr, nil
	}
	if err != errNoSuchInterface {
		// The interface exists but its address is ambiguous or unusable;
		// surface that error rather than falling through.
		return nil, err
	}
	// Not an interface name; try to parse the string as an IP literal.
	if ip := net.ParseIP(input); ip != nil && (isUnspecifiedValid || !ip.IsUnspecified()) {
		return ip, nil
	}
	return nil, errBadNetworkIdentifier
}
func (c *Cluster) resolveSystemAddrViaSubnetCheck() (net.IP, error) {
// Use the system's only IP address, or fail if there are
// multiple addresses to choose from. Skip interfaces which
// are managed by docker via subnet check.
interfaces, err := net.Interfaces()
if err != nil {
return nil, err
}
var systemAddr net.IP
var systemInterface string
// List Docker-managed subnets
v4Subnets, v6Subnets := c.config.NetworkSubnetsProvider.Subnets()
ifaceLoop:
for _, intf := range interfaces {
// Skip inactive interfaces and loopback interfaces
if (intf.Flags&net.FlagUp == 0) || (intf.Flags&net.FlagLoopback) != 0 {
continue
}
addrs, err := intf.Addrs()
if err != nil {
continue
}
var interfaceAddr4, interfaceAddr6 net.IP
for _, addr := range addrs {
ipAddr, ok := addr.(*net.IPNet)
// Skip loopback and link-local addresses
if !ok || !ipAddr.IP.IsGlobalUnicast() {
continue
}
if ipAddr.IP.To4() != nil {
// IPv4
// Ignore addresses in subnets that are managed by Docker.
for _, subnet := range v4Subnets {
if subnet.Contains(ipAddr.IP) {
continue ifaceLoop
}
}
if interfaceAddr4 != nil {
return nil, errMultipleIPs(intf.Name, intf.Name, interfaceAddr4, ipAddr.IP)
}
interfaceAddr4 = ipAddr.IP
} else {
// IPv6
// Ignore addresses in subnets that are managed by Docker.
for _, subnet := range v6Subnets {
if subnet.Contains(ipAddr.IP) {
continue ifaceLoop
}
}
if interfaceAddr6 != nil {
return nil, errMultipleIPs(intf.Name, intf.Name, interfaceAddr6, ipAddr.IP)
}
interfaceAddr6 = ipAddr.IP
}
}
// In the case that this interface has exactly one IPv4 address
// and exactly one IPv6 address, favor IPv4 over IPv6.
if interfaceAddr4 != nil {
if systemAddr != nil {
return nil, errMultipleIPs(systemInterface, intf.Name, systemAddr, interfaceAddr4)
}
systemAddr = interfaceAddr4
systemInterface = intf.Name
} else if interfaceAddr6 != nil {
if systemAddr != nil {
return nil, errMultipleIPs(systemInterface, intf.Name, systemAddr, interfaceAddr6)
}
systemAddr = interfaceAddr6
systemInterface = intf.Name
}
}
if systemAddr == nil {
return nil, errNoIP
}
return systemAddr, nil
}
// listSystemIPs returns every IP address assigned to any interface on
// this host, or nil when the interfaces cannot be enumerated.
func listSystemIPs() []net.IP {
	interfaces, err := net.Interfaces()
	if err != nil {
		return nil
	}
	var ips []net.IP
	for _, iface := range interfaces {
		addrs, err := iface.Addrs()
		if err != nil {
			// Skip interfaces whose addresses cannot be read.
			continue
		}
		for _, addr := range addrs {
			if ipNet, ok := addr.(*net.IPNet); ok {
				ips = append(ips, ipNet.IP)
			}
		}
	}
	return ips
}
func errMultipleIPs(interfaceA, interfaceB string, addrA, addrB net.IP) error {
if interfaceA == interfaceB {
return configError(fmt.Sprintf("could not choose an IP address to advertise since this system has multiple addresses on interface %s (%s and %s)", interfaceA, addrA, addrB))
}
return configError(fmt.Sprintf("could not choose an IP address to advertise since this system has multiple addresses on different interfaces (%s on %s and %s on %s)", addrA, interfaceA, addrB, interfaceB))
}<|fim▁end|> | }
// getDataPathPort validates vxlan udp port (data path port) number. |
<|file_name|>coords.hpp<|end_file_name|><|fim▁begin|>// Copyright 2015 Zhu.Jin Liang
#ifndef CAFFE_UTIL_COORDS_H_
#define CAFFE_UTIL_COORDS_H_
#include <algorithm>
#include <utility>
#include <vector>
#include "caffe/util/util_pre_define.hpp"
namespace caffe {
template <typename Dtype> class Net;
// Per-axis affine map y = a*x + b, stored as one (scale, offset) pair
// per axis. Used to track how coordinates transform across layers.
template <typename Dtype>
class DiagonalAffineMap {
 public:
  // Construct from an explicit list of (scale, offset) pairs.
  explicit DiagonalAffineMap(const vector<pair<Dtype, Dtype> > coefs)
    : coefs_(coefs) { }
  // Identity map over nd axes: scale 1, offset 0 on every axis.
  static DiagonalAffineMap identity(const int nd) {
    return DiagonalAffineMap(vector<pair<Dtype, Dtype> >(nd, make_pair(1, 0)));
  }
  // Compose this map with `other`, pairing coefficients axis by axis.
  inline DiagonalAffineMap compose(const DiagonalAffineMap& other) const {
    DiagonalAffineMap<Dtype> out;
    if (coefs_.size() == other.coefs_.size()) {
      transform(coefs_.begin(), coefs_.end(), other.coefs_.begin(),
          std::back_inserter(out.coefs_), &compose_coefs);
    } else {
      // To support CropPatchFromMaxFeaturePositionLayer: when one side
      // holds a single 2-element (h, w) pair and the other side's size
      // is a multiple of 2, broadcast the 2-element side across every
      // (h, w) pair of the other.
      if ( (coefs_.size() == 2) && (other.coefs_.size() % coefs_.size() == 0) ) {
        for (int i = 0; i < other.coefs_.size(); i += 2) {
          out.coefs_.push_back(compose_coefs(coefs_[0], other.coefs_[i]));
          out.coefs_.push_back(compose_coefs(coefs_[1], other.coefs_[i + 1]));
        }
      } else if ( (other.coefs_.size() == 2) && (coefs_.size() % other.coefs_.size() == 0) ) {
        for (int i = 0; i < coefs_.size(); i += 2) {
          out.coefs_.push_back(compose_coefs(coefs_[i], other.coefs_[0]));
          out.coefs_.push_back(compose_coefs(coefs_[i + 1], other.coefs_[1]));
        }
      } else {
        LOG(FATAL) << "Attempt to compose DiagonalAffineMaps of different dimensions: "
            << coefs_.size() << " vs " << other.coefs_.size();
      }
    }
    // If all (h, w) coefficient pairs came out equal, collapse the list
    // down to a single pair.
    if (out.coefs_.size() > 2 && out.coefs_.size() % 2 == 0) {
      bool isOK = true;
      for (int i = 2; i < out.coefs_.size() && isOK; i += 2) {
        isOK = IsEqual(out.coefs_[0].first, out.coefs_[i].first)
            && IsEqual(out.coefs_[0].second, out.coefs_[i].second)
            && IsEqual(out.coefs_[1].first, out.coefs_[i + 1].first)
            && IsEqual(out.coefs_[1].second, out.coefs_[i + 1].second);
      }
      if (isOK) {
        out.coefs_.erase(out.coefs_.begin() + 2, out.coefs_.end());
      }
    }
    return out;
  }
  // Inverse map: each (a, b) becomes (1/a, -b/a).
  inline DiagonalAffineMap inv() const {
    DiagonalAffineMap<Dtype> out;
    transform(coefs_.begin(), coefs_.end(), std::back_inserter(out.coefs_),
        &inv_coefs);
    return out;
  }
  inline vector<pair<Dtype, Dtype> > coefs() { return coefs_; }
 private:
  DiagonalAffineMap() { }
  // Composition of two 1-D affine maps applied left-after-right:
  // (a1, b1) o (a2, b2) = (a1*a2, a1*b2 + b1).
  static inline pair<Dtype, Dtype> compose_coefs(pair<Dtype, Dtype> left,
      pair<Dtype, Dtype> right) {
    return make_pair(left.first * right.first,
        left.first * right.second + left.second);
  }
  static inline pair<Dtype, Dtype> inv_coefs(pair<Dtype, Dtype> coefs) {
    return make_pair(1 / coefs.first, - coefs.second / coefs.first);
  }
  // One (scale, offset) pair per axis.
  vector<pair<Dtype, Dtype> > coefs_;
};
template <typename Dtype>
DiagonalAffineMap<Dtype> FilterMap(const int kernel_h, const int kernel_w,
const int stride_h, const int stride_w, const int pad_h, const int pad_w) {<|fim▁hole|> vector<pair<Dtype, Dtype> > coefs;
coefs.push_back(make_pair(stride_h,
static_cast<Dtype>(kernel_h - 1) / 2 - pad_h));
coefs.push_back(make_pair(stride_w,
static_cast<Dtype>(kernel_w - 1) / 2 - pad_w));
return DiagonalAffineMap<Dtype>(coefs);
}
} // namespace caffe
#endif // CAFFE_UTIL_COORDS_H_<|fim▁end|> | |
<|file_name|>controllers.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('Home').controller('ModalDeleteAssetCtrl',
function ($scope, $modalInstance, parentScope, HomeService,
asset, name, token, typeId, menuId) {
$scope.name = name;
$scope.message = "Are you sure you want to delete this asset ?";
$scope.ok = function () {
HomeService.deleteAsset(
token,
menuId,
asset,
function (response) {
if (response) {
if (response.error_description) {
$scope.error = response.error_description + ". Please logout!";
} else {
if (response.success) {
parentScope.removeAssetFromTemplate(typeId, asset);<|fim▁hole|> } else {
$scope.error = "Invalid server response";
}
}
);
$modalInstance.close();
};
$scope.cancel = function () {
$modalInstance.dismiss('cancel');
};
});<|fim▁end|> | } else {
$scope.message = response.message;
}
} |
<|file_name|>glob.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package namespace
import (
"io"
"strings"
"sync"
"v.io/v23"
"v.io/v23/context"
"v.io/v23/glob"
"v.io/v23/naming"
"v.io/v23/options"
"v.io/v23/rpc"
"v.io/v23/verror"
"v.io/x/ref/lib/apilog"
)
// tracks remembers which (server, glob pattern) combinations have
// already been visited so that cyclic mounts do not cause the glob to
// recurse forever.
type tracks struct {
	m      sync.Mutex
	places map[string]struct{}
}

// beenThereDoneThat records all of the given servers for pattern pstr
// and reports whether any of them had already been visited with the same
// pattern. It marks every server even when returning true.
func (tr *tracks) beenThereDoneThat(servers []naming.MountedServer, pstr string) bool {
	tr.m.Lock()
	defer tr.m.Unlock()
	found := false
	for _, s := range servers {
		// Visit key: rooted server address + "!" + pattern string.
		x := naming.JoinAddressName(s.Server, "") + "!" + pstr
		if _, ok := tr.places[x]; ok {
			found = true
		}
		tr.places[x] = struct{}{}
	}
	return found
}
// task is a sub-glob that has to be performed against a mount table. Tasks are
// done in parrallel to speed up the glob.
type task struct {
pattern *glob.Glob // pattern to match
er *naming.GlobError // error for that particular point in the name space
me *naming.MountEntry // server to match at
error error // any error performing this task
depth int // number of mount tables traversed recursively
}
// globAtServer performs a Glob on the servers at a mount point. It cycles through the set of
// servers until it finds one that replies.
func (ns *namespace) globAtServer(ctx *context.T, t *task, replies chan *task, tr *tracks, opts []rpc.CallOpt) {
defer func() {
if t.error == nil {
replies <- nil
} else {
replies <- t
}
}()
client := v23.GetClient(ctx)
pstr := t.pattern.String()
ctx.VI(2).Infof("globAtServer(%v, %v)", *t.me, pstr)
// If there are no servers to call, this isn't a mount point. No sense
// trying to call servers that aren't there.
if len(t.me.Servers) == 0 {
t.error = nil
return
}
// If we've been there before with the same request, give up.
if tr.beenThereDoneThat(t.me.Servers, pstr) {
t.error = nil
return
}
// t.me.Name has already been matched at this point to so don't pass it to the Call. Kind of sleazy to do this
// but it avoids making yet another copy of the MountEntry.
on := t.me.Name
t.me.Name = ""
timeoutCtx, cancel := withTimeout(ctx)
defer cancel()
call, err := client.StartCall(timeoutCtx, "", rpc.GlobMethod, []interface{}{pstr}, append(opts, options.Preresolved{t.me})...)
t.me.Name = on
if err != nil {
t.error = err
return
}
// At this point we're commited to the server that answered the call
// first. Cycle through all replies from that server.
for {
// If the mount table returns an error, we're done. Send the task to the channel
// including the error. This terminates the task.
var gr naming.GlobReply
err := call.Recv(&gr)
if err == io.EOF {
break
}
if err != nil {
t.error = err
return
}
var x *task
switch v := gr.(type) {
case naming.GlobReplyEntry:
// Convert to the ever so slightly different name.MountTable version of a MountEntry
// and add it to the list.
x = &task{
me: &naming.MountEntry{
Name: naming.Join(t.me.Name, v.Value.Name),
Servers: v.Value.Servers,
ServesMountTable: v.Value.ServesMountTable,
IsLeaf: v.Value.IsLeaf,
},
depth: t.depth + 1,
}
case naming.GlobReplyError:
// Pass on the error.
x = &task{
er: &v.Value,
depth: t.depth + 1,
}
}
// x.depth is the number of servers we've walked through since we've gone
// recursive (i.e. with pattern length of 0). Limit the depth of globs.
// TODO(p): return an error?
if t.pattern.Len() == 0 {
if x.depth > ns.maxRecursiveGlobDepth {
continue
}
}
replies <- x
}
t.error = call.Finish()
return
}
// depth returns the directory depth of a given name. It is used to pick off the unsatisfied part of the pattern.
func depth(name string) int {
name = strings.Trim(naming.Clean(name), "/")
if name == "" {
return 0
}
return strings.Count(name, "/") + 1
}
// globLoop fires off a go routine for each server and reads backs replies.
func (ns *namespace) globLoop(ctx *context.T, e *naming.MountEntry, prefix string, pattern *glob.Glob, reply chan naming.GlobReply, tr *tracks, opts []rpc.CallOpt) {
defer close(reply)
// Provide enough buffers to avoid too much switching between the readers and the writers.
// This size is just a guess.
replies := make(chan *task, 100)
defer close(replies)
// Push the first task into the channel to start the ball rolling. This task has the
// root of the search and the full pattern. It will be the first task fired off in the for
// loop that follows.
replies <- &task{me: e, pattern: pattern}
replies <- nil
inFlight := 1
// Perform a parallel search of the name graph. Each task will send what it learns
// on the replies channel. If the reply is a mount point and the pattern is not completely
// fulfilled, a new task will be fired off to handle it.
for inFlight != 0 {
t := <-replies
// A nil reply represents a successfully terminated task.
// If no tasks are running, return.
if t == nil {
inFlight--
continue
}
// We want to output this entry if there was a real error other than
// "not a mount table".
//
// An error reply is also a terminated task.
// If no tasks are running, return.
if t.error != nil {
if !notAnMT(t.error) {
reply <- &naming.GlobReplyError{Value: naming.GlobError{Name: naming.Join(prefix, t.me.Name), Error: t.error}}
}
inFlight--
continue
}
// If this is just an error from the mount table, pass it on.
if t.er != nil {
x := *t.er
x.Name = naming.Join(prefix, x.Name)
reply <- &naming.GlobReplyError{Value: x}
continue
}
// Get the pattern elements below the current path.
suffix := pattern
for i := depth(t.me.Name) - 1; i >= 0; i-- {
suffix = suffix.Tail()
}
// If we've satisfied the request and this isn't the root,
// reply to the caller.
if suffix.Len() == 0 && t.depth != 0 {
x := *t.me<|fim▁hole|> }
// If the pattern is finished (so we're only querying about the root on the
// remote server) and the server is not another MT, then we needn't send the
// query on since we know the server will not supply a new address for the
// current name.
if suffix.Empty() {
if !t.me.ServesMountTable {
continue
}
}
// If this is restricted recursive and not a mount table, don't descend into it.
if suffix.Restricted() && suffix.Len() == 0 && !t.me.ServesMountTable {
continue
}
// Perform a glob at the next server.
inFlight++
t.pattern = suffix
go ns.globAtServer(ctx, t, replies, tr, opts)
}
}
// Glob implements naming.MountTable.Glob.
func (ns *namespace) Glob(ctx *context.T, pattern string, opts ...naming.NamespaceOpt) (<-chan naming.GlobReply, error) {
defer apilog.LogCallf(ctx, "pattern=%.10s...,opts...=%v", pattern, opts)(ctx, "") // gologcop: DO NOT EDIT, MUST BE FIRST STATEMENT
// Root the pattern. If we have no servers to query, give up.
e, patternWasRooted := ns.rootMountEntry(pattern)
if len(e.Servers) == 0 {
return nil, verror.New(naming.ErrNoMountTable, ctx)
}
// If the name doesn't parse, give up.
g, err := glob.Parse(e.Name)
if err != nil {
return nil, err
}
tr := &tracks{places: make(map[string]struct{})}
// If pattern was already rooted, make sure we tack that root
// onto all returned names. Otherwise, just return the relative
// name.
var prefix string
if patternWasRooted {
prefix = e.Servers[0].Server
}
e.Name = ""
reply := make(chan naming.GlobReply, 100)
go ns.globLoop(ctx, e, prefix, g, reply, tr, getCallOpts(opts))
return reply, nil
}<|fim▁end|> | x.Name = naming.Join(prefix, x.Name)
reply <- &naming.GlobReplyEntry{Value: x} |
<|file_name|>mocklogger.go<|end_file_name|><|fim▁begin|>package logger
import (
"io"
"sync"
"github.com/Clever/kayvee-go/v7/router"
)
// MockRouteCountLogger is a mock implementation of KayveeLogger that counts the router rules
// applied to each log call without actually formatting or writing the log line.
type MockRouteCountLogger struct {
logger KayveeLogger
routeMatches map[string][]router.RuleOutput
}
// RuleCounts returns a map of rule names to the number of times that rule has been applied
// in routing logs for MockRouteCountLogger. Only includes routing rules that have at least
// one use.
func (ml *MockRouteCountLogger) RuleCounts() map[string]int {
out := make(map[string]int)
for k, v := range ml.routeMatches {
out[k] = len(v)
}
return out
}
// RuleOutputs returns a map of rule names to the exact logs which matched that rule (after routing has
// been applied to those logs). This allows you to inspect the routed log and verify data about it.
func (ml *MockRouteCountLogger) RuleOutputs() map[string][]router.RuleOutput {
return ml.routeMatches
}
// NewMockCountLogger returns a new MockRoutCountLogger with the specified `source`.
func NewMockCountLogger(source string) *MockRouteCountLogger {
return NewMockCountLoggerWithContext(source, nil)
}
// NewMockCountLoggerWithContext returns a new MockRoutCountLogger with the specified `source` and `contextValues`.
func NewMockCountLoggerWithContext(source string, contextValues map[string]interface{}) *MockRouteCountLogger {
routeMatches := make(map[string][]router.RuleOutput)
lg := NewWithContext(source, contextValues)
lg.setFormatLogger(&routeCountingFormatLogger{
routeMatches: routeMatches,
})
mocklg := MockRouteCountLogger{
logger: lg,
routeMatches: routeMatches,
}
return &mocklg
}
/////////////////////////////
//
// routeCountingFormatLogger
//
/////////////////////////////
// routeCountingFormatLogger implements the formatLogger interface to allow for counting
// invocations of routing rules.
type routeCountingFormatLogger struct {
mu sync.Mutex
routeMatches map[string][]router.RuleOutput
}
// formatAndLog tracks routing statistics for this mock router.
// Initialization works as with the default format logger, but no formatting or logging is actually performed.
func (fl *routeCountingFormatLogger) formatAndLog(data map[string]interface{}) {
routeData, ok := data["_kvmeta"]
if !ok {
return
}
routes, ok := routeData.(map[string]interface{})["routes"]
if !ok {
return
}
for _, route := range routes.([]map[string]interface{}) {
rule := route["rule"].(string)
fl.mu.Lock()
fl.routeMatches[rule] = append(fl.routeMatches[rule], route)
fl.mu.Unlock()
}
}
// setFormatter implements the FormatLogger method.
func (fl *routeCountingFormatLogger) setFormatter(formatter Formatter) {
// we don't format anything in this mock logger
}
// setOutput implements the FormatLogger method.
func (fl *routeCountingFormatLogger) setOutput(output io.Writer) {
// we don't output anything in this mock logger
}
/////////////////////////////////////////////////////////////
//
// KayveeLogger implementation (all passthrough to logger)
//
/////////////////////////////////////////////////////////////
// SetConfig implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) SetConfig(source string, logLvl LogLevel, formatter Formatter, output io.Writer) {
ml.logger.SetConfig(source, logLvl, formatter, output)
}
// AddContext implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) AddContext(key, val string) {
ml.logger.AddContext(key, val)
}
// GetContext implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) GetContext(key string) (interface{}, bool) {
return ml.logger.GetContext(key)
}
// SetLogLevel implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) SetLogLevel(logLvl LogLevel) {
ml.logger.SetLogLevel(logLvl)
}
// SetFormatter implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) SetFormatter(formatter Formatter) {
ml.logger.SetFormatter(formatter)
}
// SetOutput implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) SetOutput(output io.Writer) {
ml.logger.SetOutput(output)
}
// setFormatLogger implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) setFormatLogger(output formatLogger) {
// Mocks need a custom format logger
}
// SetRouter implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) SetRouter(router router.Router) {
ml.logger.SetRouter(router)
}
// Trace implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Trace(title string) {
ml.logger.Trace(title)
}
// Debug implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Debug(title string) {
ml.logger.Debug(title)
}
// Info implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Info(title string) {
ml.logger.Info(title)
}
// Warn implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Warn(title string) {
ml.logger.Warn(title)
}
// Error implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Error(title string) {
ml.logger.Error(title)
}
// Critical implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Critical(title string) {
ml.logger.Critical(title)
}
// Counter implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) Counter(title string) {
ml.logger.Counter(title)
}
// GaugeInt implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) GaugeInt(title string, value int) {
ml.logger.GaugeInt(title, value)
}
// GaugeFloat implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) GaugeFloat(title string, value float64) {
ml.logger.GaugeFloat(title, value)
}
// TraceD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) TraceD(title string, data map[string]interface{}) {
ml.logger.TraceD(title, data)
}
// DebugD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) DebugD(title string, data map[string]interface{}) {
ml.logger.DebugD(title, data)
}
// InfoD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) InfoD(title string, data map[string]interface{}) {
ml.logger.InfoD(title, data)
}
// WarnD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) WarnD(title string, data map[string]interface{}) {
ml.logger.WarnD(title, data)
}
// ErrorD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) ErrorD(title string, data map[string]interface{}) {
ml.logger.ErrorD(title, data)
}
// CriticalD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) CriticalD(title string, data map[string]interface{}) {
ml.logger.CriticalD(title, data)
}<|fim▁hole|>}
// GaugeIntD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) GaugeIntD(title string, value int, data map[string]interface{}) {
ml.logger.GaugeIntD(title, value, data)
}
// GaugeFloatD implements the method for the KayveeLogger interface.
// Logs with type = gauge, and value = value
func (ml *MockRouteCountLogger) GaugeFloatD(title string, value float64, data map[string]interface{}) {
ml.logger.GaugeFloatD(title, value, data)
}<|fim▁end|> |
// CounterD implements the method for the KayveeLogger interface.
func (ml *MockRouteCountLogger) CounterD(title string, value int, data map[string]interface{}) {
ml.logger.CounterD(title, value, data) |
<|file_name|>shuffle.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import random
from copy import copy
from cnfgen.formula.cnf import CNF
def Shuffle(F,
polarity_flips='shuffle',
variables_permutation='shuffle',
clauses_permutation='shuffle'):
"""Reshuffle the given formula F
Returns a formula logically equivalent to `F` with the
following transformations applied the following order:
1. Polarity flips, specified as one of the following
- string 'fixed': no flips is applied
- string 'shuffle': each variable is subjected
to a random flip of polarity
- a list of `-1` and `+1` of length equal to
the number of the variables in the formula `F`.
2. Variable shuffling, specified as one of the following
- string 'fixed': no shuffling
- string 'shuffle': the variable order is randomly permuted
- an sequence of integer which represents a permutation of [1,...,N],
where N is the number of the variable in the formula. The i-th variable
is mapped to the i-th index in the sequence.
3. Clauses shuffling, specified as one of the following
- string 'fixed': no shuffling
- string 'shuffle': the clauses are randomly permuted
- an sequence S of integer which represents a permutation of [0,...,M-1],
where M is the number of the variable in the formula. The i-th clause in F
is going to be at position S[i] in the new formula.
Parameters
----------
F : cnfgen.formula.cnf.CNF
formula to be shuffled
polarity_flips: string or iterable(int)
Specifies the flips of polarity
variables_permutation: string or iterable(int)<|fim▁hole|> clauses_permutation: string or iterable(int)
Specifies the permutation of the clauses.
"""
# empty cnf
out = CNF()
out.header = copy(F.header)
if 'description' in out.header:
out.header['description'] += " (reshuffled)"
i = 1
while 'transformation {}'.format(i) in out.header:
i += 1
out.header['transformation {}'.format(i)] = "Formula reshuffling"
N = F.number_of_variables()
M = F.number_of_clauses()
out.update_variable_number(N)
# Manage the parameters
perr = 'polarity_flips is either \'fixed\', \'shuffle\' or in {-1,+1]}^n'
verr = 'variables_permutation is either \'fixed\', \'shuffle\' or a permutation of [1,...,N]'
cerr = 'clauses_permutation is either \'fixed\', \'shuffle\' or a permutation of [0,...,M-1]'
# polarity flips
if polarity_flips == 'fixed':
polarity_flips = [1] * N
elif polarity_flips == 'shuffle':
polarity_flips = [random.choice([-1, 1]) for x in range(N)]
else:
if len(polarity_flips) != N:
raise ValueError(perr)
for i in range(N):
if abs(polarity_flips[i]) != 1:
raise ValueError(perr)
# variables permutation
if variables_permutation == 'fixed':
variables_permutation = range(1, N+1)
elif variables_permutation == 'shuffle':
variables_permutation = list(range(1, N+1))
random.shuffle(variables_permutation)
else:
if len(variables_permutation) != N:
raise ValueError(verr)
tmp = sorted(variables_permutation)
for i in range(N):
if i+1 != tmp[i]:
raise ValueError(verr)
#
# permutation of clauses
#
if clauses_permutation == 'fixed':
clauses_mapping = ((i, i) for i in range(M))
elif clauses_permutation == 'shuffle':
tmp = list(range(M))
random.shuffle(tmp)
clauses_mapping = sorted(enumerate(tmp), key=lambda x: x[1])
else:
if len(clauses_permutation) != M:
raise ValueError(cerr)
tmp = sorted(clauses_permutation)
for i in range(M):
if i != tmp[i]:
raise ValueError(cerr)
clauses_mapping = sorted(enumerate(clauses_permutation), key=lambda x: x[1])
# precompute literal mapping
substitution = [None] * (2 * N + 1)
for i in range(1, N+1):
substitution[i] = polarity_flips[i-1] * variables_permutation[i-1]
substitution[-i] = -substitution[i]
# load clauses
for (old, new) in clauses_mapping:
assert new == out.number_of_clauses()
out.add_clause(substitution[lit] for lit in F[old])
return out<|fim▁end|> | Specifies the permutation of the variables. |
<|file_name|>domain.cpp<|end_file_name|><|fim▁begin|>/**
* \file
* \brief Implementions of RL domains, Mountain Car (MC), HIV, and Bicycle.
*
* Copyright (c) 2008-2014 Robert D. Vincent.
*/
#include <vector>
#include <utility>
#include <cmath>
#include <string.h>
using namespace std;
#include "domain.h"
#include "random.h"
/** Returns the sign of a real number.
* \param x The number whose sign we wish to extract.
* \return The sign of \c x.
*/
double sign(double x) {
if (x == 0.0) return 0.0;
if (x < 0.0) return -1.0;
return 1.0;
}
/*
double dangle(double x) {
return fabs(x + 2.0*k*M_PI);
}
*/
/**
* Bicycle-balancing task used in Ernst et al. 2005.
*/
class Bicycle : public Domain {
private:
double dt;
double v;
double g;
double d_CM;
double c;
double h;
double M_c;
double M_d;
double M_p;
double M;
double r;
double l;
//double delta_psi; // used for reward calculation.
public:
/** Construct the bicycle balancing domain.
*/
Bicycle() {
numActions = 9;
numSteps = 500;
numDimensions = 7;
dt = 0.01;
v = 10.0/3.6;
g = 9.82;
d_CM = 0.3;
c = 0.66;
h = 0.94;
M_c = 15.0;
M_d = 1.7;
M_p = 60.0;
M = M_c + M_p;
r = 0.34;
l = 1.11;
}
bool isTerminal(vector<double> s) const {
return (s[0] > M_PI*12.0/180.0);
}
double getReward(vector<double> s, int a) const {
if (isTerminal(s)) return -1.0;
//return 0.1 * delta_psi;
return 0;
}
OneStepResult performAction(vector<double> s, int a) {
vector<double> sp(numDimensions, 0.0);
double ad[] = { 0.0, 0.0, 0.0, -0.02, -0.02, -0.02, 0.02, 0.02, 0.02 };
double aT[] = { 0.0, 2.0, -2.0, 0.0, 2.0, -2.0, 0.0, 2.0, -2.0 };
double d = ad[a];
double T = aT[a];
double w = rndInterval(-0.02, 0.02);
double dot_sigma = v/r;
double I_bnc = 13.0/3.0*M_c*h*h + M_p*(h + d_CM)*(h + d_CM);
double I_dc = M_d * r * r;
double I_dv = 3.0/2.0 * M_d * r * r;
double I_dl = 1.0/2.0 * M_d * r * r;
double omega = s[0];
double dot_omega = s[1];
double theta = s[2];
double dot_theta = s[3];
double x_b = s[4];
double y_b = s[5];
double psi = s[6];
double phi = omega + atan(d + w)/h;
double invrf = fabs(sin(theta))/l;
double invrb = fabs(tan(theta))/l;
double invrcm = (theta == 0.0) ? 0.0 : 1.0/sqrt((l-c)*(l-c) + (1.0/invrb)*(1.0/invrb));
sp[0] = omega + dt * dot_omega;
sp[1] = dot_omega + dt * (1.0 / I_bnc) * (M*h*g*sin(phi) - cos(phi)*(I_dc*dot_sigma*dot_theta + sign(theta)*v*v*(M_d*r*(invrb+invrf)+M*h*invrcm)));
sp[2] = theta + dt * dot_theta;
sp[3] = dot_theta + dt * (T - I_dv*dot_sigma*dot_omega)/I_dl;
sp[4] = x_b + dt * v * cos(psi);
sp[5] = y_b + dt * v * sin(psi);
sp[6] = psi + dt * sign(theta)*v*invrb;
//delta_psi = dangle(psi) - dangle(sp[6]);
if (fabs(theta) > M_PI*80.0/180.0) {
sp[2] = sign(theta)*M_PI*80.0/180.0;
sp[3] = 0.0;
}
OneStepResult p(sp, getReward(sp, a));
return p;
}
vector<double> initialState() {
vector<double> s(numDimensions, 0.0);
s[6] = M_PI;
return s;
}
};
/**
* \brief Implements the HIV model defined by Adams et al. (2004, 2005) and
* used by Ernst et al. (2006).
*
* This domain simulates the dynamics of HIV infection at the cellular level.
* It uses a six-dimensional real-valued state
* vector, in the order T1, T2, T1*, T2*, V, E, where T1 and T2 are the
* populations of uninfected type 1 and type 2 cells, and T1* and T2* are
* the populations of infected type 1 and 2 cells. V is the viral population,
* and E is the population of immune effectors.
*
* The problem is deterministic. It has three stable states, corresponding
* to an "uninfected" state, an "unhealthy" state, and a "healthy" state.
* The goal of the problem is to learn how to move the model from the
* unhealthy state to the healthy state.
*
* The action space in this implementation is limited to four discrete
* choices: No therapy, reverse transcriptase inhibitor only (RTI),
* protease inhibitor (PI) only, or both RTI and PI
* simultaneously. The RTI and PI have fixed values.
* These are the stable state vectors:
*
* - unhealthy: (163574.0, 5.0, 11945.0, 46.0, 63919.0, 24.0)
* - healthy: (967839.0, 621.0, 76.0, 6.0, 415.0, 353108.0)
* - uninfected: (1000000.0, 3198.0, 0.0, 0.0, 0.0, 10.0)
*/
class HIV : public Domain {
private:
double Q; /**< Coefficient of the viral load in the reward function. */
double R1; /**< Coefficient of the RTI in the reward function. */
double R2; /**< Coefficient of the PI in the reward function. */
double S; /**< Coefficient of the immune effectors. */
double l1; /**< type 1 cell production rate */
double d1; /**< type 1 cell death rate */
double k1; /**< population 1 infection rate */
double l2; /**< type 2 cell production rate */
double d2; /**< type 2 cell death rate */
double f; /**< treatment efficacy reduction in population 2 */
double k2; /**< population 2 infection rate */
double delta; /**< infected cell death rate */
double m1; /**< immune-induced clearance rate for population 1 */
double m2; /**< immune-induced clearance rate for population 2 */
double NT; /**< virions produced per infected cell */
double c; /**< virus natural death rate */
double p1; /**< average number of virions infecting a type 1 cell */
double p2; /**< average number of virions infecting a type 2 cell */
double lE; /**< immune effector production rate */
double bE; /**< maximum birth rate for immune effectors */
double Kb; /**< saturation constant for immune effector birth */
double dE; /**< maximum death rate for immune effectors */
double Kd; /**< saturation constant for immune effector death */
double deltaE; /**< natural death rate for immune effectors */
// Other constants
double dt; /**< Our integration timestep, in days. */
int nInt; /**< Number of integration steps per action. */
public:
/**
* Constructor for the HIV domain.
*/
HIV() {
numActions = 4;
numSteps = 200;
numDimensions = 6;
// constants for the reward function
Q = 0.1;
R1 = 20000.0;
R2 = 2000.0;
S = 1000.0;
// Constants for the ODE's
l1 = 10000.0; // type 1 cell production rate
d1 = 0.01; // type 1 cell death rate
k1 = 8.0e-7; // population 1 infection rate
l2 = 31.98; // type 2 cell production rate
d2 = 0.01; // type 2 cell death rate
f = 0.34; // treatment efficacy reduction in population 2
k2 = 1e-4; // population 2 infection rate
delta = 0.7; // infected cell death rate
m1 = 1.0e-5; // immune-induced clearance rate for population 1
m2 = 1.0e-5; // immune-induced clearance rate for population 2
NT = 100.0; // virions produced per infected cell
c = 13.0; // virus natural death rate
p1 = 1.0; // average number of virions infecting a type 1 cell
p2 = 1.0; // average number of virions infecting a type 2 cell
lE = 1.0; // immune effector production rate
bE = 0.3; // maximum birth rate for immune effectors
Kb = 100.0; // saturation constant for immune effector birth
dE = 0.25; // maximum death rate for immune effectors
Kd = 500.0; // saturation constant for immune effector death
deltaE = 0.1; // natural death rate for immune effectors
// Other constants
dt = 0.001; // Our integration timestep, in days.
nInt = (int)(5.0 / dt); // Number of integration steps per action.
}
/**
* Calculate the reward for the HIV domain. The reward is a
* continuous function of the action (treatment option), the virus
* population (\c s[4]) and the immune effector count (\c s[5]).
*/
double getReward(vector<double> s, int a) const {
// e1 is between 0.0 and 0.7 (RTI therapy on/off)
// e2 is between 0.0 and 0.3 (PI therapy on/off)
double V = s[4];
double E = s[5];
double e1 = ((a & 1) != 0) ? 0.7 : 0.0;
double e2 = ((a & 2) != 0) ? 0.3 : 0.0;
return -(Q*V + R1*e1*e1 + R2*e2*e2 - S*E);
}
/**
* Calculate the next state of the environment. The equations
* are integrated using a simple Euler method.
*/
OneStepResult performAction(vector<double> s, int a) {
/* This version is restricted to only four possible actions.
*/
double e1 = ((a & 1) != 0) ? 0.7 : 0.0;
double e2 = ((a & 2) != 0) ? 0.3 : 0.0;
vector<double> dy(numDimensions);
vector<double> y = s;
for (int i = 0; i < nInt; i++) {
dy[0] = l1 - d1 * y[0] - (1 - e1) * k1 * y[4] * y[0];
dy[1] = l2 - d2 * y[1] - (1 - f * e1) * k2 * y[4] * y[1];
dy[2] = (1 - e1) * k1 * y[4] * y[0] - delta * y[2] - m1 * y[5] * y[2];
dy[3] = (1 - f * e1) * k2 * y[4] * y[1] - delta * y[3] - m2 * y[5] * y[3];
dy[4] = (1.0 - e2) * NT * delta * (y[2] + y[3]) - c * y[4] -
((1 - e1) * p1 * k1 * y[0] + (1 - f * e1) * p2 * k2 * y[1]) * y[4];
dy[5] = lE + (bE * (y[2] + y[3]) * y[5]) / (y[2] + y[3] + Kb) -
(dE * (y[2] + y[3]) * y[5]) / (y[2] + y[3] + Kd) - deltaE * y[5];
for (int j = 0; j < numDimensions; j++)
y[j] += dy[j] * dt;
}
OneStepResult p(y, getReward(y, a));
return p;
}
/**
* The initial state in the environment is the "sick" stable state.
* There are two other stable states, a "healthy infected" state,
* and an "uninfected" state.
*/
vector<double> initialState() {
vector<double> s(numDimensions);
/* This is the "sick" initial state.
*/
s[0] = 163574.0;
s[1] = 5.0;
s[2] = 11945.0;
s[3] = 46.0;
s[4] = 63919.0;
s[5] = 24.0;
return s;
}
};
/**
* Implementation of the classic "mountain-car" reinforcement learning
* problem from Singh and Sutton 1996. It implements a two-dimensional
* continuous state consisting of the car's position and velocity.
*/
class MC : public Domain {
static const double min_x = -1.2; /**< Minimum position. */
static const double max_x = 0.5; /**< Maximum position. */
static const double min_v = -0.07; /**< Minimum velocity. */
static const double max_v = 0.07; /**< Maximum velocity. */
public:
/**
* Construct a mountain-car environment.
*/
MC() {
numDimensions = 2;
numActions = 3;
numSteps = 2000;
}
/**
* The domain is stochastic, in that it begins at a random initial
* state. It is otherwise deterministic.
*/
bool isStochastic() { return true; }
/**
* Return the reward for this state and action. For mountain car the
* usual implementation is to give a reward of -1 for every time step
* before reaching the goal.
* \param s The state vector.
* \param a The action.
* \return The reward received.
*/
double getReward(vector<double> s, int a) const {
if (isTerminal(s)) {
return 0.0;
}
else return -1.0;
}
/**
* Return the initial state for the task. Selects uniformly random values
* from the legal range of the position and velocity values.
* \return A two-dimensional state vector consisting of a random legal
* position and velocity.
*/
vector<double> initialState() {
vector<double> s(numDimensions);
s[0] = rndInterval(min_x, max_x);
s[1] = rndInterval(min_v, max_v);
return s;
}
/**
* Perform one time step in the mountain car environment.
* \param s The two-dimensional mountain car state vector.
* \param a The action to perform, where 0 means full reverse, 2 means full forward, and 1 implies no acceleration.
* \return A pair containing the next state and reward.
*/
OneStepResult performAction(vector<double> s, int a) {
double acc = 0.0;
if (a == 0) {
acc = -1.0;
}
if (a == 2) {
acc = 1.0;
}
double x0 = s[0];
double v0 = s[1];
double v1 = v0 + acc * 0.001 + cos(3.0 * x0) * -0.0025;
<|fim▁hole|> v1 = min_v;
}
else if (v1 > max_v) {
v1 = max_v;
}
double x1 = x0 + v1;
if (x1 < min_x) {
x1 = min_x;
}
else if (x1 > max_x) {
x1 = max_x;
}
vector<double> s1(numDimensions);
s1[0] = x1;
s1[1] = v1;
OneStepResult p(s1, getReward(s1, a));
return p;
}
/**
* Returns true if the car has reached the goal state.
* \param s The state to evaluate.
* \return True if the car's position is at is maximum.
*/
bool isTerminal(vector<double> s) const { return (s[0] >= max_x); }
};
/**
* Create a domain by name. Avoids having to export domain classes outside
* this module.
* \param name The name of the domain to create. It is not case-sensitive.
* The default is HIV
* \param propfile The name of an optional property file, which will provide
* configuration information for the domain.
* \return The domain object.
*/
Domain *CreateDomain(const char *name, const char *propfile) {
if (!strcasecmp(name, "MC")) {
return new MC();
}
else if (!strcasecmp(name, "Bicycle")) {
return new Bicycle();
}
else if (!strcasecmp(name, "rf")) {
extern Domain *getRF(const char *);
return getRF(propfile);
}
else if (!strcasecmp(name, "tass")) {
extern Domain *getTass(const char *);
return getTass(propfile);
}
else {
return new HIV();
}
}<|fim▁end|> | // Enforce bounds.
if (v1 < min_v) { |
<|file_name|>networkDrawer.py<|end_file_name|><|fim▁begin|>import sys
import collections as c
from scipy import special, stats
import numpy as n, pylab as p, networkx as x
class NetworkDrawer:
    """Render a directed network as a Graphviz image, laying agents out in
    periphery/intermediary/hubs sectors ordered by a centrality metric
    (strength or degree).

    NOTE(review): depends on the module-level aliases ``c`` (collections),
    ``n`` (numpy), ``p`` (pylab) and ``x`` (networkx).
    """
    # Class-level counter. NOTE(review): ``self.drawer_count += 1`` in
    # __init__ writes an *instance* attribute, so the class counter never
    # advances; preserved as-is to keep behavior unchanged.
    drawer_count = 0

    def __init__(self, metric="strength"):
        self.drawer_count += 1
        # Canonical one-letter metric id: "s" (strength) or "d" (degree).
        self.metric_ = self.standardizeName(metric)
        self.draw_count = 0  # number of drawNetwork() calls so far

    def standardizeName(self, name):
        """Map a metric alias (English or Portuguese) to "s" or "d".

        Raises ValueError for unknown aliases (previously this surfaced as
        an opaque UnboundLocalError).
        """
        name_ = None
        if name in (["s", "strength", "st"] + ["f", "força", "forca", "fo"]):
            name_ = "s"
        if name in (["d", "degree", "dg"] + ["g", "grau", "gr"]):
            name_ = "d"
        if name_ is None:
            raise ValueError("unknown metric name: {!r}".format(name))
        return name_

    def makeLayout(self, network_measures, network_partitioning=None):
        """Compute agent ordering and (x, y) positions.

        Agents are sorted by the chosen metric and split into
        periphery/intermediary/hubs — 80%/15%/5% by default, or the sector
        sizes given by ``network_partitioning.sectorialized_agents__``.
        Coordinates are produced by makeXY().
        """
        self.network_measures = network_measures
        if self.metric_ == "s":
            measures_ = network_measures.strengths
        elif self.metric_ == "d":
            measures_ = network_measures.degrees
        else:
            print("not known metric to make layout")
        # Agents sorted by increasing metric value.
        self.ordered_measures = ordered_measures = c.OrderedDict(
            sorted(measures_.items(), key=lambda x: x[1]))
        self.measures = measures = list(ordered_measures.values())
        self.authors = authors = list(ordered_measures.keys())
        total = network_measures.N
        if not network_partitioning:
            self.k1 = k1 = round(total * .80)
            self.k2 = k2 = round(total * .95)
            self.periphery = authors[:k1]
            self.intermediary = authors[k1:k2]
            self.hubs = authors[k2:]
        else:
            sectors = network_partitioning.sectorialized_agents__
            self.k1 = k1 = len(sectors[0])
            self.k2 = k2 = k1 + len(sectors[1])
            self.periphery, self.intermediary, self.hubs = sectors
        print("fractions ={:0.4f}, {:0.4f}, {:0.4f}".format(
            k1 / total, (k2 - k1) / total, 1 - k2 / total))
        self.makeXY()

    def drawNetwork(self, network, network_measures, filename="example.png",
                    label="auto", network_partitioning=None):
        """Draw ``network.g`` to ``filename`` via Graphviz neato.

        Node fill encodes weighted clustering, node width/height encode
        in/out strengths, shape encodes sector; edge width/color encode
        edge weight. Requires makeLayout() to have been called first
        (uses ``self.authors`` and ``self.posXY``).
        """
        p.clf()
        if self.metric_ == "s":
            measures_ = network_measures.strengths
        elif self.metric_ == "d":
            # NOTE(review): makeLayout() reads ``degrees`` (plural) here;
            # confirm which attribute the measures object really exposes.
            measures_ = network_measures.degree
        else:
            print("not known metric to make layout")
        ordered_measures = c.OrderedDict(sorted(measures_.items(), key=lambda x: x[1]))
        measures = list(ordered_measures.values())
        authors = list(ordered_measures.keys())
        total = network_measures.N
        if not network_partitioning:
            k1 = round(total * .80)
            k2 = round(total * .95)
            periphery = authors[:k1]
            intermediary = authors[k1:k2]
            hubs = authors[k2:]
        else:
            sectors = network_partitioning.sectorialized_agents__
            k1 = len(sectors[0])
            k2 = k1 + len(sectors[1])
            periphery, intermediary, hubs = (set(iii) for iii in sectors)
        # Node sizes are scaled against a third of the largest in/out strength.
        in_measures = network_measures.in_strengths
        min_in = max(in_measures.values()) / 3 + 0.1
        out_measures = network_measures.out_strengths
        min_out = max(out_measures.values()) / 3 + .1
        self.clustering = clustering = network_measures.weighted_clusterings
        A = x.drawing.nx_agraph.to_agraph(network.g)
        A.node_attr['style'] = 'filled'
        A.graph_attr["bgcolor"] = "black"
        A.graph_attr["pad"] = .1
        #A.graph_attr["size"]="9.5,12"
        A.graph_attr["fontsize"] = "25"
        if label == "auto":
            label = self.makeLabel()
        A.graph_attr["label"] = label
        A.graph_attr["fontcolor"] = "white"
        cm = p.cm.Reds(range(2**10))  # color table
        self.cm = cm
        nodes = A.nodes()
        self.colors = colors = []
        self.inds = inds = []
        self.poss = poss = []
        for node in nodes:
            n_ = A.get_node(node)
            ind_author = self.authors.index(n_)
            # BUGFIX: the original did ``inds.append(inds)`` (appending the
            # list to itself); record the author's layout index instead.
            inds.append(ind_author)
            colors.append('#%02x%02x%02x' % tuple(
                [int(255 * i) for i in cm[int(clustering[n_] * 255)][:-1]]))
            n_.attr['fillcolor'] = colors[-1]
            n_.attr['fixedsize'] = True
            n_.attr['width'] = abs(.6 * (in_measures[n_] / min_in + .05))
            n_.attr['height'] = abs(.6 * (out_measures[n_] / min_out + .05))
            if n_ in hubs:
                n_.attr["shape"] = "hexagon"
            elif n_ in intermediary:
                pass  # keep Graphviz's default shape for intermediary agents
            else:
                n_.attr["shape"] = "diamond"
            pos = "%f,%f" % tuple(self.posXY[ind_author])
            poss.append(pos)
            n_.attr["pos"] = pos
            n_.attr["pin"] = True
            n_.attr["fontsize"] = 25
            n_.attr["fontcolor"] = "white"
            n_.attr["label"] = ""
        weights = [s[2]["weight"] for s in network_measures.edges]
        self.weights = weights
        max_weight = max(weights)
        self.max_weight = max_weight
        self.weights_ = []
        edges = A.edges()
        for e in edges:
            factor = float(e.attr['weight'])
            self.weights_.append(factor)
            e.attr['penwidth'] = .34 * factor
            e.attr["arrowsize"] = 1.5
            e.attr["arrowhead"] = "lteeoldiamond"
            w = factor / max_weight  # normalized weight in [0, 1]
            cor = p.cm.Spectral(int(w * 255))
            self.cor = cor
            cor256 = 255 * n.array(cor[:-1])
            # Hand-rolled RGB -> "#rrggbb" conversion, one hex nibble at a time.
            r0 = int(cor256[0] / 16)
            r1 = int(cor256[0] - r0 * 16)
            r = hex(r0)[-1] + hex(r1)[-1]
            g0 = int(cor256[1] / 16)
            g1 = int(cor256[1] - g0 * 16)
            g = hex(g0)[-1] + hex(g1)[-1]
            b0 = int(cor256[2] / 16)
            b1 = int(cor256[2] - b0 * 16)
            b = hex(b0)[-1] + hex(b1)[-1]
            #corRGB="#"+r+g+b+":#"+r+g+b
            corRGB = "#" + r + g + b
            e.attr["color"] = corRGB
        A.draw(filename, prog="neato")  # twopi ou circo
        ################
        self.A = A
        self.draw_count += 1

    def makeLabel(self):
        """Build the graph caption: window/step info plus node/edge counts."""
        label = ""
        if "window_size" in dir(self):
            label += "w: {}, ".format(self.window_size)
        if "step_size" in dir(self):
            label += "m: {} ,".format(self.draw_count * self.step_size + self.offset)
        else:
            # BUGFIX: the original used "m: %i, ".format(...), which emitted
            # the literal text "m: %i, " since %-placeholders are not
            # substituted by str.format().
            label += "m: {}, ".format(self.draw_count)
        label += "N = %i, E = %i" % (self.network_measures.N, self.network_measures.E)
        return label

    def updateNetwork(self, network, networkMeasures=None):
        """Placeholder for incremental updates; intentionally a no-op."""
        pass

    def makeXY(self):
        """Compute normalized layout coordinates for each sector.

        Hubs sit on a half circle, intermediary agents on a circular arc and
        periphery agents on a straight diagonal; coordinates are stacked into
        ``self.posXY`` in periphery/intermediary/hubs order, matching
        ``self.authors``.
        """
        size_periphery = self.k1
        size_intermediary = self.k2 - self.k1
        size_hubs = self.network_measures.N - self.k2
        # Keep the hub count even so the half circle stays symmetric.
        if size_hubs % 2 == 1:
            size_hubs += 1
            size_intermediary -= 1
        xh = n.linspace(0, 0.5, size_hubs, endpoint=False)[::-1]
        thetah = 2 * n.pi * xh
        yh = n.sin(thetah)
        xi = n.linspace(1, 0.5, size_intermediary, endpoint=True)
        thetai = 2 * n.pi * xi
        yi = n.sin(thetai)
        xp = n.linspace(.95, 0.4, size_periphery)[::-1]
        yp = n.linspace(.1, 1.25, size_periphery)[::-1]
        self.pos = ((xp, yp), (xi, yi), (xh, yh))
        XFACT = 7
        YFACT = 3
        self.posX = posX = n.hstack((xp, xi, xh)) * XFACT
        self.posY = posY = n.hstack((yp, yi, yh)) * YFACT
        self.posXY = n.vstack((posX.T, posY.T)).T
<|file_name|>logging-separate-lines.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.<|fim▁hole|>
#[macro_use]
extern crate log;
use std::old_io::Command;
use std::env;
use std::str;
fn main() {
let args: Vec<String> = env::args().collect();
if args.len() > 1 && args[1] == "child" {
debug!("foo");
debug!("bar");
return
}
let p = Command::new(&args[0])
.arg("child")
.spawn().unwrap().wait_with_output().unwrap();
assert!(p.status.success());
let mut lines = str::from_utf8(&p.error).unwrap().lines();
assert!(lines.next().unwrap().contains("foo"));
assert!(lines.next().unwrap().contains("bar"));
}<|fim▁end|> |
// ignore-windows
// exec-env:RUST_LOG=debug
// compile-flags:-C debug-assertions=y |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
``nanshe`` package, an image processing toolkit.
===============================================================================
Overview
===============================================================================
The ``nanshe`` package is an image processing package that contains a variety
of different techniques, which are used primarily to assemble the ADINA
algorithm proposed by Diego, et al.
( doi:`10.1109/ISBI.2013.6556660`_ ) to extract active neurons from
an image sequence. This algorithm uses online dictionary learning (a form of
matrix factorization) at its heart as implemented by Marial, et al.
( doi:`10.1145/1553374.1553463`_ ) to find a set of atoms (or basis
images) that are representative of an image sequence and can be used to
approximately reconstruct the sequence. However, it is designed in a modular
way so that a different matrix factorization could be swapped in and
appropriately parameterized. Other portions of the algorithm include a
preprocessing phase that has a variety of different techniques that can be
applied optionally. For example, removing registration artifacts from
a line-by-line registration algorithm, background subtraction, and a wavelet
transform to filter objects in a particular size.
===============================================================================
Installation
===============================================================================
-------------------------------------------------------------------------------
Dependencies
-------------------------------------------------------------------------------
Implementation of the algorithm has been done here in pure Python. However, a
few dependencies are required to get started. These include NumPy_, SciPy_,
h5py_, scikit-image_, SPAMS_, VIGRA_, and rank_filter_. The first 4 can be
found in standard distributions like Anaconda_. Installing VIGRA and
rank_filter can be done by using CMake_. SPAMS requires an existing BLAS/LAPACK
implementation. On Mac and Linux, this can be anything. Typically ATLAS_ is
used, but OpenBLAS_ or `Intel MKL`_ (if available) can be used, as well. This
will require modifying the setup.py script. On Windows, the setup.py links to
R_, which should be changed if another BLAS is available.
-------------------------------------------------------------------------------
Building
-------------------------------------------------------------------------------
Python
===============================================================================
As this package is pure Python, building follows through the standard method.
Currently, we require setuptools_ for installation; so, make sure it is
installed. Then simply issue the following command to build and install.
.. code-block:: sh
python setup.py install
Alternatively, one can build and then install in two steps if that is
preferable.
.. code-block:: sh
python setup.py build
python setup.py install
Conda
===============================================================================
Current packages can be found on our anaconda_ channel
( https://anaconda.org/nanshe/nanshe ). New ones are released every time a
passing tagged release is pushed to the ``master`` branch on GitHub. It is also
possible to build packages for conda_ for non-release commits as we do in our
continuous integration strategy.
To do this one requires the dependencies be installed or be available from a
anaconda channel. Additionally, one must be using the conda's ``root``
environment and have conda-build installed. Once this is done one need
only run the following command with ``setup.py``.
.. code-block:: sh
python setup.py bdist_conda
Assuming this completes successfully, conda will provide the path to the built
package.
-------------------------------------------------------------------------------
Testing
-------------------------------------------------------------------------------
Running the test suite is fairly straightforward. Testing is done using nose_;
so, make sure you have a running copy if you wish to run the tests. Some of the
tests require drmaa_ installed and properly configured. If that is not the
case, those tests will be skipped automatically. To run the test suite, one
must be in the source directory. Then simply run the following command. This
will run all the tests and doctests. Depending on your machine, this will take
a few minutes to complete.
.. code-block:: sh
nosetests
The full test suite includes 3D tests, which are very slow to run and so are
not run by default. As the code has been written to be dimensionally agnostic,
these tests don't cover anything that the 2D tests don't already cover. To run
the 3D tests, simply use ``setup.all.cfg``.
.. code-block:: sh
nosetests -c setup.all.cfg
It is also possible to run this as part of the setup.py process. In which case,
this can be done as shown below. If 3D tests are required for this portion, one
need only replace ``setup.cfg`` with ``setup.all.cfg``.
.. code-block:: sh
python setup.py nosetests
Also, the typical ``test`` subcommand can be used to run ``nosetests``, but no
other arguments are allowed.
.. code-block:: sh
python setup.py test
-------------------------------------------------------------------------------
Documentation
-------------------------------------------------------------------------------
Current documentation can be found on the GitHub page
( http://nanshe-org.github.io/nanshe/ ). A new copy is rebuilt any time there is
a passing commit is added to the ``master`` branch. Each documentation commit
is added to ``gh-pages`` branch with a reference to the commit in ``master``
that triggered the build as well as the tag (version) if provided.
It is also possible to build the documentation from source. This project uses
Sphinx_ for generating documentation. Please make sure you have it installed.
In particular, a version from 1.3 or later is required. Additionally, the
`Cloud Sphinx Theme`_ is required for generating the documentation and is used
in the HTML layout.
The ``rst`` files (outside of ``index.rst``) are not distributed with the source
code. This is because it is trivial to generate them and it is too easy for the
code to become out of sync with documentation if they are distributed. However,
building ``rst`` files has been made a dependency of all other documentation
build steps so one does not have to think about this. The preferred method for
building documentation is to use the ``setup.py`` hook as shown below. This
will build the RST files and place them in ``docs/``. It will also build the
HTML files by default and put them in the directory ``build/sphinx/html/``.
Simply open the ``index.html`` file to take a look.
.. code-block:: sh
python setup.py build_sphinx
More build options can be determined by running the help command.
.. code-block:: sh
python setup.py build_sphinx --help
-------------------------------------------------------------------------------
Cleaning
-------------------------------------------------------------------------------
After any building operation a number of undesirable intermediate files are
created and left behind that one may wish to remove. To do this one merely
needs to run the clean command.
.. code-block:: sh
python setup.py clean
This has been modified to also remove RST files generated when building
documentation. However, it will leave any final build products like HTML files.
If one wishes to remove everything built (including final build products), the
clean all command will do this.
.. code-block:: sh
python setup.py clean --all
.. _`10.1109/ISBI.2013.6556660`: http://dx.doi.org/10.1109/ISBI.2013.6556660
.. _`10.1145/1553374.1553463`: http://dx.doi.org/10.1145/1553374.1553463
.. _NumPy: http://www.numpy.org/
.. _SciPy: http://www.scipy.org/
.. _h5py: http://www.h5py.org/
.. _scikit-image: http://scikit-image.org/
.. _SPAMS: http://spams-devel.gforge.inria.fr/
.. _VIGRA: http://ukoethe.github.io/vigra/
.. _rank_filter: http://github.com/nanshe-org/rank_filter/
.. _Anaconda: http://store.continuum.io/cshop/anaconda/
.. _CMake: http://www.cmake.org/
.. _ATLAS: http://math-atlas.sourceforge.net/
.. _OpenBLAS: http://www.openblas.net/
.. _`Intel MKL`: http://software.intel.com/en-us/intel-mkl
.. _R: http://www.r-project.org/
.. _setuptools: http://pythonhosted.org/setuptools/
.. _anaconda: https://anaconda.org/
.. _conda: http://conda.pydata.org/
.. _nose: http://nose.readthedocs.org/en/latest/
.. _drmaa: http://github.com/pygridtools/drmaa-python
.. _Sphinx: http://sphinx-doc.org/
.. _`Cloud Sphinx Theme`: https://pythonhosted.org/cloud_sptheme/
"""
__author__ = "John Kirkham <[email protected]>"
__date__ = "$Dec 22, 2014 08:46:12 EST$"
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
__all__ = [
"box", "converter", "io", "imp", "learner", "registerer", "syn", "util"
]
from nanshe import box<|fim▁hole|>from nanshe import io
from nanshe import imp
from nanshe import learner
from nanshe import registerer
from nanshe import syn
from nanshe import util<|fim▁end|> | from nanshe import converter |
<|file_name|>archive.go<|end_file_name|><|fim▁begin|>package http
import (
"strconv"
"go-common/app/service/main/archive/api"
"go-common/library/ecode"
bm "go-common/library/net/http/blademaster"
)
// pageList returns the page (part) list of the archive identified by the
// "aid" form parameter. Responds RequestErr for a missing/invalid aid and
// NothingFound when the archive has no pages.
func pageList(c *bm.Context) {
	aid, err := strconv.ParseInt(c.Request.Form.Get("aid"), 10, 64)
	if err != nil || aid <= 0 {
		c.JSON(nil, ecode.RequestErr)
		return
	}
	var pages []*api.Page
	if pages, err = playSvr.PageList(c, aid); err != nil {
		c.JSON(nil, err)
		return
	}
	if len(pages) == 0 {
		c.JSON(nil, ecode.NothingFound)
		return
	}
	c.JSON(pages, nil)
}
// videoShot returns the videoshot (thumbnail strip) data for an archive,
// binding aid (required, >= 1), cid and index from the request form.
func videoShot(c *bm.Context) {
	req := new(struct {
		Aid   int64 `form:"aid" validate:"min=1"`
		Cid   int64 `form:"cid"`
		Index bool  `form:"index"`
	})
	if err := c.Bind(req); err != nil {
		return
	}
	c.JSON(playSvr.VideoShot(c, req.Aid, req.Cid, req.Index))
}
func playURLToken(c *bm.Context) {
var (
aid, cid, mid int64
err error
)
params := c.Request.Form
aidStr := params.Get("aid")
if aid, err = strconv.ParseInt(aidStr, 10, 64); err != nil {
c.JSON(nil, ecode.RequestErr)
return<|fim▁hole|> }
cid, _ = strconv.ParseInt(params.Get("cid"), 10, 64)
midStr, _ := c.Get("mid")
mid = midStr.(int64)
c.JSON(playSvr.PlayURLToken(c, mid, aid, cid))
}<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2020 DDN. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
use std::{collections::BTreeSet, env};
pub mod api;
pub mod api_utils;
pub mod display_utils;
pub mod error;
pub mod filesystem;
pub mod nginx;
pub mod ostpool;
pub mod profile;
pub mod server;
pub mod snapshot;
pub mod stratagem;
pub mod update_repo_file;
/// Expands each hostlist expression (e.g. `node[1-4]`) and returns the union
/// of all resulting hostnames. Fails on the first expression that does not
/// parse.
pub fn parse_hosts(hosts: &[String]) -> Result<BTreeSet<String>, error::ImlManagerCliError> {
    let mut union = BTreeSet::new();

    for expr in hosts {
        union.extend(hostlist_parser::parse(expr)?);
    }

    Ok(union)
}
/// Returns the running executable's file stem (name without directory or
/// extension), or `None` if it cannot be determined or is not valid UTF-8.
fn exe_name() -> Option<String> {
    let exe = std::env::current_exe().ok()?;
    let stem = exe.file_stem()?;
    Some(stem.to_str()?.to_owned())
}
pub fn selfname(suffix: Option<&str>) -> Option<String> {
match env::var("CLI_NAME") {
Ok(n) => suffix.map(|s| format!("{}-{}", n, s)).or_else(|| Some(n)),<|fim▁hole|> Err(_) => exe_name(),
}
}<|fim▁end|> | |
<|file_name|>ChunkResponder.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016, WSO2 Inc. (http://wso2.com) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.msf4j;
import io.netty.buffer.ByteBuf;
import java.io.Closeable;
import java.io.IOException;
import java.nio.ByteBuffer;
/**
* A responder for sending chunk-encoded response.
*/
public interface ChunkResponder extends Closeable {
/**
* Adds a chunk of data to the response. The content will be sent to the client asynchronously.
*
* @param chunk content to send
* @throws IOException if the connection is already closed
*/
void sendChunk(ByteBuffer chunk) throws IOException;
/**
* Adds a chunk of data to the response. The content will be sent to the client asynchronously.
*
* @param chunk content to send
* @throws IOException if this {@link ChunkResponder} already closed or the connection is closed
*/<|fim▁hole|> void sendChunk(ByteBuf chunk) throws IOException;
/**
* Closes this responder which signals the end of the chunk response.
*/
@Override
void close() throws IOException;
}<|fim▁end|> | |
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from msw.models import Page, RichText, MembersPostUser, MembersPostText
from django.contrib import admin
# could add more complicated stuff here consult:
# tutorial: https://docs.djangoproject.com/en/dev/intro/tutorial02/#enter-the-admin-site
# tutorial finished admin.py: https://github.com/haoqili/Django-Tutorial-Directory/blob/master/tutorialSite/polls/admin.py
# Register every MSW model with the default admin site (same order as before).
for _model in (Page, RichText, MembersPostUser, MembersPostText):
    admin.site.register(_model)
<|file_name|>ezRPConfig.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013-2015 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''
Module to share configuration information across modules.
This global object is used through out to store and retreive configuration.
This is to avoid passing gConfig as variables throughout.
All the configurations needed are added in ezReverseProxy.
'''
# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4
import sys
import os
import time
from gevent.queue import JoinableQueue
from ezbake.reverseproxy.thriftapi.ttypes import AuthorizationOperation<|fim▁hole|>'''
We want addGreenlets() and kill() to access global members without an instance,
perhaps the simplest idea is to just make them simple functions outside the class,
not class methods. I tried @staticmethod decorator.
#class EzRPConfig(object):
'''
# --- Identity and shared mutable state ---------------------------------------
appName = 'EzBakeFrontend'
# Active watch registrations — presumably zookeeper watches keyed by path;
# TODO confirm against zkMonitor usage.
watches = {}
# Root of the deployed container: four directory levels above this file.
containerDir = os.path.abspath(os.path.join(os.path.abspath(__file__),os.pardir,os.pardir,os.pardir,os.pardir))
# Pending configuration changes, consumed by the config greenlet (see addGreenlets).
configurationChangeQueue = JoinableQueue()
run = True
# Greenlet/service handles; populated by addGreenlets(), stopped by kill().
clientService = None
zkMonitor = None
cfgGreenlet = None
wGreenlet = None
# Current wall-clock time in epoch milliseconds.
current_milli_time = lambda: int(round(time.time() * 1000))
# Frozen builds (e.g. PyInstaller) locate the container relative to the binary.
if getattr(sys, 'frozen', False):
    containerDir = os.path.abspath(os.path.join(os.path.dirname(sys.executable),os.pardir,os.pardir))
# --- Container layout: templates, nginx binary and working/log dirs ----------
templateDir = os.path.join(containerDir,'app','templates')
nginx = os.path.join(containerDir,'app','nginx')
eznginxlibpath = os.path.join(containerDir,'libs')
workingDirectory = os.path.join(containerDir,'wd')
logDirectory = os.path.join(containerDir,'logs')
eznginxmoduleLogProp = os.path.join(logDirectory,'log4j.properties')
# Generated nginx configuration tree lives under the working directory.
configDirectory = os.path.join(workingDirectory,'conf')
mainConfig = os.path.join(configDirectory,'nginx.conf')
confdDirectory = os.path.join(configDirectory,'conf.d')
manualDirectory = os.path.join(containerDir,'manual')
ezconfig_dir = os.path.join(containerDir, 'config')
htmlRootDir = os.path.join(containerDir, 'static_content')
favicon_file = os.path.join(htmlRootDir, 'ezbstatic', 'images', 'favicon.ico')
# external facing ssl files for nginx
ssl_cadir = os.path.join(ezconfig_dir,'ssl/user_ca_files')
ssl_keyfile = os.path.join(ezconfig_dir,'ssl/server/server.key')
ssl_certfile = os.path.join(ezconfig_dir,'ssl/server/server.crt')
ssl_server_certs = os.path.join(workingDirectory, 'ssl')
# Two alternating directories (a/b) — presumably for atomic cert swaps; TODO confirm.
ssl_server_certs_dirs = [os.path.join(workingDirectory, 'ssl_a'), os.path.join(workingDirectory, 'ssl_b')]
ssl_cafile = os.path.join(containerDir,'wd','CAchain.pem')
# internal ssl files for thrift service w/in EzBake
ezEtc = os.path.join(containerDir,'etc')
ezcertdir = os.path.join(containerDir,'etc/ezbake/pki/cert/config/ssl')
ez_keyfile = os.path.join(ezcertdir,'application.priv')
ez_cafile = os.path.join(ezcertdir,'ezbakeca.crt')
ez_certfile = os.path.join(ezcertdir,'application.crt')
# Static content directory to serve per site static content
static_contents = os.path.join(containerDir,'ezbappstatic')
static_contents_dirs = [os.path.join(containerDir, 'sc_a'), os.path.join(containerDir, 'sc_b')]
mainConfigTemplate = os.path.join(templateDir,'nginx.conf')
mimeTemplate = os.path.join(templateDir,'mime.types')
mimeConfig = os.path.join(configDirectory,'mime.types')
# PID file is per-process so concurrent instances do not collide.
nginxPidFile = os.path.join(workingDirectory,'nginx_%d.pid' % os.getpid())
# Sentinel file: deleting it triggers shutdown (see shutdown watcher).
shutdownFile = os.path.join(workingDirectory,'delete_this_file_to_shutdown_efe')
ezproxyciphers = "HIGH:!DSS:!aNULL@STRENGTH"
defaultEznginxOps = AuthorizationOperation.USER_INFO
# Restrict access to EzFrontend Thrift services to the following CN
ez_frontend_access = r'_Ez_Deployer|_Ez_EFEUI'
def addGreenlets(thriftService, kzMonitor, cfgChange, shutdown):
    """Record the module-level greenlet/service handles so kill() can stop them."""
    global clientService, zkMonitor, cfgGreenlet, wGreenlet
    clientService, zkMonitor, cfgGreenlet, wGreenlet = (
        thriftService, kzMonitor, cfgChange, shutdown)
def kill():
    # Stop the module-level greenlets registered via addGreenlets(), skipping
    # any that were never set (they default to None).
    # NOTE(review): wGreenlet is not killed here — confirm that is intentional.
    if clientService:
        clientService.kill()
    if zkMonitor:
        zkMonitor.kill()
    if cfgGreenlet:
        cfgGreenlet.kill()
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Yith Library Server is a password storage server.
# Copyright (C) 2012-2013 Yaco Sistemas
# Copyright (C) 2012-2013 Alejandro Blanco Escudero <[email protected]>
# Copyright (C) 2012-2013 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
import logging
from yithlibraryserver.user.analytics import get_google_analytics
from yithlibraryserver.user.gravatar import get_gravatar
from yithlibraryserver.user.idp import add_identity_provider
from yithlibraryserver.user.models import User, ExternalIdentity
from yithlibraryserver.user.security import get_user
logger = logging.getLogger(__name__)
def includeme(config):
    """Pyramid includeme hook: wire the user package into the application.

    Registers the ``add_identity_provider`` directive, reified request
    properties (user, google_analytics, gravatar), the user-facing routes,
    and touches the ORM models so SQLAlchemy registers them.
    """
    config.add_directive('add_identity_provider', add_identity_provider)

    config.add_request_method(get_user, 'user', reify=True)
    config.add_request_method(get_google_analytics,
                              'google_analytics', reify=True)
    config.add_request_method(get_gravatar, 'gravatar', reify=True)

    # Route name -> URL pattern, registered in the original order.
    routes = (
        ('login', '/login'),
        ('register_new_user', '/register'),
        ('logout', '/logout'),
        ('user_destroy', '/destroy'),
        ('user_information', '/profile'),
        ('user_preferences', '/preferences'),
        ('user_identity_providers', '/identity-providers'),
        ('user_send_email_verification_code',
         '/send-email-verification-code'),
        ('user_verify_email', '/verify-email'),
        ('user_google_analytics_preference',
         '/google-analytics-preference'),
        ('user_view', '/user'),
    )
    for name, pattern in routes:
        config.add_route(name, pattern)

    logger.debug('Importing %s model so SQLAlchemy knows about it', User)
    logger.debug('Importing %s model so SQLAlchemy knows about it', ExternalIdentity)
<|file_name|>backlog.py<|end_file_name|><|fim▁begin|><|fim▁hole|>taiga_ncurses.ui.views.backlog
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
import urwid
from taiga_ncurses.ui.widgets import generic, backlog
from . import base
class ProjectBacklogSubView(base.SubView):
    # Sub-view rendering a project's backlog: a tab bar, a stats header and a
    # scrollable user-story list, plus helpers that open/close popup forms
    # (create/edit US, bulk create, milestone selection) on the parent view.
    help_popup_title = "Backlog Help Info"
    # Extra keybinding help rows appended to the base view's help entries.
    help_popup_info = base.SubView.help_popup_info + (
        ( "Backlog Movements:", (
            ("↑ | k | ctrl p", "Move Up"),
            ("↓ | j | ctrl n", "Move Down"),
            ("← | h | ctrl b", "Move Left"),
            ("→ | l | ctrl f", "Move Right"),
        )),
        ( "User Stories Actions:", (
            ("n", "Create new US"),
            ("N", "Create new USs in bulk"),
            ("e", "Edit selected US"),
            ("Supr", "Delete selected US"),
            ("K", "Move selected US up"),
            ("J", "Move selected US down"),
            ("w", "Save the position of all USs"),
            ("m", "Move selected US to a Milestone"),
            ("r", "Refresh the screen")
        )),
    )
    def __init__(self, parent_view, project, notifier, tabs):
        # Build the widget tree: tabs, spacer, stats, spacer, story list.
        super().__init__(parent_view)
        self.project = project
        self.notifier = notifier
        self.stats = backlog.BacklogStats(project)
        self.user_stories = backlog.UserStoryList(project)
        list_walker = urwid.SimpleFocusListWalker([
            tabs,
            generic.box_solid_fill(" ", 1),
            self.stats,
            generic.box_solid_fill(" ", 1),
            self.user_stories
        ])
        list_walker.set_focus(4)  # focus the user-story list by default
        self.widget = urwid.ListBox(list_walker)
    def open_user_story_form(self, user_story={}):
        # Show the create/edit US form as an overlay on the parent view.
        # NOTE(review): mutable default argument — shared across calls if the
        # form ever mutates it; confirm intended.
        self.user_story_form = backlog.UserStoryForm(self.project, user_story=user_story)
        # FIXME: Calculate the form size
        self.parent.show_widget_on_top(self.user_story_form, 80, 24)
    def close_user_story_form(self):
        # Drop the form and remove the overlay.
        del self.user_story_form
        self.parent.hide_widget_on_top()
    def get_user_story_form_data(self):
        # Collect the US form's fields into an API-ready payload dict;
        # returns {} when no form is open.
        data = {}
        if hasattr(self, "user_story_form"):
            data.update({
                "subject": self.user_story_form.subject,
                "milestone": self.user_story_form.milestone,
                "points": self.user_story_form.points,
                "status": self.user_story_form.status,
                "is_blocked": self.user_story_form.is_blocked,
                "blocked_note": self.user_story_form.blocked_note,
                "tags": self.user_story_form.tags,
                "description": self.user_story_form.description,
                "team_requirement": self.user_story_form.team_requirement,
                "client_requirement": self.user_story_form.client_requirement,
                "project": self.project["id"],
            })
        return data
    def open_user_stories_in_bulk_form(self):
        # Show the bulk-create form as an overlay.
        self.user_stories_in_bulk_form = backlog.UserStoriesInBulkForm(self.project)
        # FIXME: Calculate the form size
        self.parent.show_widget_on_top(self.user_stories_in_bulk_form, 80, 24)
    def close_user_stories_in_bulk_form(self):
        del self.user_stories_in_bulk_form
        self.parent.hide_widget_on_top()
    def get_user_stories_in_bulk_form_data(self):
        # Payload for the bulk-create endpoint; {} when no form is open.
        data = {}
        if hasattr(self, "user_stories_in_bulk_form"):
            data.update({
                "bulkStories": self.user_stories_in_bulk_form.subjects,
                "projectId": self.project["id"],
            })
        return data
    def open_milestones_selector_popup(self, user_story={}):
        # Show the milestone selector for the given US as an overlay.
        # NOTE(review): mutable default argument here as well.
        self.milestone_selector_popup = backlog.MIlestoneSelectorPopup(self.project, user_story)
        # FIXME: Calculate the popup size
        self.parent.show_widget_on_top(self.milestone_selector_popup, 100, 30)
    def close_milestone_selector_popup(self):
        del self.milestone_selector_popup
        self.parent.hide_widget_on_top()
""" |
<|file_name|>apt.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# vim:fenc=utf-8<|fim▁hole|>#
# Copyright (C) 2020 Freie Universität Berlin
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import subprocess
from .base import Installer
__author__ = "Martine S. Lenders"
__copyright__ = "Copyright (C) 2020 Freie Universität Berlin"
__credits__ = ["Martine S. Lenders"]
__license__ = "LGPLv2.1"
__maintainer__ = "Martine S. Lenders"
__email__ = "[email protected]"
class Apt(Installer):
    """Installer backend that installs packages via ``apt-get``."""

    def _install(self, package):
        """Install the apt package named by ``package[self.os]["name"]``."""
        pkg_name = package[self.os]["name"]
        # -y: non-interactive, assume "yes" to prompts.
        subprocess.run(["apt-get", "-y", "install", pkg_name])
<|file_name|>search.rs<|end_file_name|><|fim▁begin|>// Copyright (c) 2019, Ben Boeckel
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of this project nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
use std::iter;
use crate::keytypes::User;
use crate::Permission;
use super::utils;
use super::utils::kernel::*;
use super::utils::keys::*;
#[test]
fn empty_key_type() {
let keyring = utils::new_test_keyring();
let err = keyring
.search_for_key::<EmptyKey, _, _>("empty_key_type", None)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EINVAL));
}
#[test]
fn unsupported_key_type() {
let keyring = utils::new_test_keyring();
let err = keyring
.search_for_key::<UnsupportedKey, _, _>("unsupported_key_type", None)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::ENOKEY));
}
#[test]
fn invalid_key_type() {
let keyring = utils::new_test_keyring();
let err = keyring
.search_for_key::<InvalidKey, _, _>("invalid_key_type", None)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EPERM));
}
#[test]
fn max_key_type() {
let keyring = utils::new_test_keyring();
let err = keyring
.search_for_key::<MaxLenKey, _, _>("invalid_key_type", None)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::ENOKEY));
}
#[test]
fn overlong_key_type() {
let keyring = utils::new_test_keyring();
let err = keyring
.search_for_key::<OverlongKey, _, _>("overlong_key_type", None)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EINVAL));
}
// A description of exactly PAGE_SIZE - 1 bytes is the longest the kernel
// accepts (it appends the trailing NUL itself); the search is valid and
// simply finds nothing (ENOKEY).
#[test]
fn max_user_description() {
    let keyring = utils::new_test_keyring();
    // Subtract one because the NUL is added in the kernel API.
    let maxdesc: String = iter::repeat('a').take(*PAGE_SIZE - 1).collect();
    let err = keyring
        .search_for_key::<User, _, _>(maxdesc, None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// One byte longer than the maximum (no room left for the kernel's NUL) is
// rejected with EINVAL instead.
#[test]
fn overlong_user_description() {
    let keyring = utils::new_test_keyring();
    // On MIPS with < 3.19, there is a bug where this is allowed. 3.19 was released in Feb 2015,
    // so this is being ignored here.
    let maxdesc: String = iter::repeat('a').take(*PAGE_SIZE).collect();
    let err = keyring
        .search_for_key::<User, _, _>(maxdesc, None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::EINVAL));
}
// Searching through a handle that does not refer to a real keyring must
// fail with EINVAL.
#[test]
fn invalid_keyring() {
    let bogus_keyring = utils::invalid_keyring();
    let search_result = bogus_keyring.search_for_key::<User, _, _>("invalid_keyring", None);
    assert_eq!(search_result.unwrap_err(), errno::Errno(libc::EINVAL));
}
// Treating a plain key as if it were a keyring and searching "inside" it
// must fail with ENOTDIR: a key is not a container of other keys.
#[test]
fn search_key() {
    let mut keyring = utils::new_test_keyring();
    let data: &[u8] = b"payload";
    let key = keyring.add_key::<User, _, _>("search_key", data).unwrap();
    // Reinterpret the key's handle as a keyring handle.
    let disguised_keyring = utils::key_as_keyring(&key);
    let search_result = disguised_keyring.search_for_key::<User, _, _>("search_key", None);
    assert_eq!(search_result.unwrap_err(), errno::Errno(libc::ENOTDIR));
}
// Searching an empty keyring for a key finds nothing (ENOKEY).
#[test]
fn search_key_no_result() {
    let keyring = utils::new_test_keyring();
    let err = keyring
        .search_for_key::<User, _, _>("search_key_no_result", None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// Searching an empty keyring for a nested keyring also finds nothing.
#[test]
fn search_keyring_no_result() {
    let keyring = utils::new_test_keyring();
    let err = keyring
        .search_for_keyring("search_keyring_no_result", None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// A keyring with the requested description exists, but a *key* search must
// not match it: type is part of the search, so the result is ENOKEY.
#[test]
fn search_key_mismatched_type() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring.add_keyring("search_key_mismatched_type").unwrap();
    let description = "search_key_mismatched_type_keyring";
    let _ = new_keyring.add_keyring(description).unwrap();
    let err = keyring
        .search_for_key::<User, _, _>(description, None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// The converse: a user key with the requested description must not satisfy
// a *keyring* search.
#[test]
fn search_keyring_mismatched_type() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_keyring_mismatched_type")
        .unwrap();
    let description = "search_keyring_mismatched_type_key";
    let payload = &b"payload"[..];
    let _ = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    let err = keyring.search_for_keyring(description, None).unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// Happy path: a key added to a nested keyring is found by a recursive
// search starting from the parent, and its payload is intact.
#[test]
fn search_and_find_key() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring.add_keyring("search_and_find_key").unwrap();
    let description = "search_and_find_key_key";
    let payload = &b"payload"[..];
    let key = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    let found_key = keyring
        .search_for_key::<User, _, _>(description, None)
        .unwrap();
    assert_eq!(found_key, key);
    let actual_payload = key.read().unwrap();
    assert_eq!(payload, actual_payload.as_slice());
}
// Happy path for keyrings: a nested keyring is found by recursive search.
#[test]
fn search_and_find_keyring() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring.add_keyring("search_and_find_keyring").unwrap();
    let description = "search_and_find_keyring_keyring";
    let target_keyring = new_keyring.add_keyring(description).unwrap();
    let found_keyring = keyring.search_for_keyring(description, None).unwrap();
    assert_eq!(found_keyring, target_keyring);
}
// Dropping SEARCH permission on an *intermediate* keyring hides everything
// below it: the search cannot descend, so the result is ENOKEY (not
// EACCES — the target itself is never reached).
#[test]
fn search_and_find_key_no_search_perm_interm() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_key_no_search_perm_interm")
        .unwrap();
    let description = "search_and_find_key_no_search_perm_interm_key";
    let payload = &b"payload"[..];
    let _ = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    // Strip SEARCH for every permission class on the intermediate keyring.
    let perms = {
        let mut orig_perms = new_keyring.description().unwrap().perms;
        orig_perms.remove(Permission::POSSESSOR_SEARCH);
        orig_perms.remove(Permission::USER_SEARCH);
        orig_perms.remove(Permission::GROUP_SEARCH);
        orig_perms.remove(Permission::OTHER_SEARCH);
        orig_perms
    };
    new_keyring.set_permissions(perms).unwrap();
    let err = keyring
        .search_for_key::<User, _, _>(description, None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// Same as above, but the search target is a keyring instead of a key.
#[test]
fn search_and_find_keyring_no_search_perm_interm() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_keyring_no_search_perm_interm")
        .unwrap();
    let description = "search_and_find_keyring_no_search_perm_interm_keyring";
    let _ = new_keyring.add_keyring(description).unwrap();
    let perms = {
        let mut orig_perms = new_keyring.description().unwrap().perms;
        orig_perms.remove(Permission::POSSESSOR_SEARCH);
        orig_perms.remove(Permission::USER_SEARCH);
        orig_perms.remove(Permission::GROUP_SEARCH);
        orig_perms.remove(Permission::OTHER_SEARCH);
        orig_perms
    };
    new_keyring.set_permissions(perms).unwrap();
    let err = keyring.search_for_keyring(description, None).unwrap_err();
    assert_eq!(err, errno::Errno(libc::ENOKEY));
}
// Dropping SEARCH permission on the *target key itself* is different: the
// search reaches the key but is denied access, yielding EACCES.
#[test]
fn search_and_find_key_no_search_perm_direct() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_key_no_search_perm_direct")
        .unwrap();
    let description = "search_and_find_key_no_search_perm_direct_key";
    let payload = &b"payload"[..];
    let mut key = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    let perms = {
        let mut orig_perms = key.description().unwrap().perms;
        orig_perms.remove(Permission::POSSESSOR_SEARCH);
        orig_perms.remove(Permission::USER_SEARCH);
        orig_perms.remove(Permission::GROUP_SEARCH);
        orig_perms.remove(Permission::OTHER_SEARCH);
        orig_perms
    };
    key.set_permissions(perms).unwrap();
    let err = keyring
        .search_for_key::<User, _, _>(description, None)
        .unwrap_err();
    assert_eq!(err, errno::Errno(libc::EACCES));
}
// Same direct-permission denial, with a keyring as the search target.
#[test]
fn search_and_find_keyring_no_search_perm_direct() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_keyring_no_search_perm_direct")
        .unwrap();
    let description = "search_and_find_keyring_no_search_perm_direct_keyring";
    let mut target_keyring = new_keyring.add_keyring(description).unwrap();
    let perms = {
        let mut orig_perms = target_keyring.description().unwrap().perms;
        orig_perms.remove(Permission::POSSESSOR_SEARCH);
        orig_perms.remove(Permission::USER_SEARCH);
        orig_perms.remove(Permission::GROUP_SEARCH);
        orig_perms.remove(Permission::OTHER_SEARCH);
        orig_perms
    };
    target_keyring.set_permissions(perms).unwrap();
    let err = keyring.search_for_keyring(description, None).unwrap_err();
    assert_eq!(err, errno::Errno(libc::EACCES));
}
// When a destination keyring is supplied, a successful search also links
// the found key into that destination.
#[test]
fn search_and_find_key_link() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring.add_keyring("search_and_find_key_link").unwrap();
    let mut destination_keyring = keyring
        .add_keyring("search_and_find_key_link_destination")
        .unwrap();
    let description = "search_and_find_key_link_key";
    let payload = &b"payload"[..];
    let key = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    // Destination starts empty.
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert!(keys.is_empty());
    assert!(keyrings.is_empty());
    let found_key = keyring
        .search_for_key::<User, _, _>(description, &mut destination_keyring)
        .unwrap();
    assert_eq!(found_key, key);
    let actual_payload = key.read().unwrap();
    assert_eq!(payload, actual_payload.as_slice());
    // The found key is now linked into the destination keyring.
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert_eq!(keys.len(), 1);
    assert_eq!(keys[0], key);
    assert!(keyrings.is_empty());
}
// Same link-on-find behavior when the search target is a keyring.
#[test]
fn search_and_find_keyring_link() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring.add_keyring("search_and_find_keyring_link").unwrap();
    let mut destination_keyring = keyring
        .add_keyring("search_and_find_keyring_link_destination")
        .unwrap();
    let description = "search_and_find_keyring_link_keyring";
    let target_keyring = new_keyring.add_keyring(description).unwrap();
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert!(keys.is_empty());
    assert!(keyrings.is_empty());
    let found_keyring = keyring
        .search_for_keyring(description, &mut destination_keyring)
        .unwrap();
    assert_eq!(found_keyring, target_keyring);
    // The found keyring is now linked into the destination.
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert!(keys.is_empty());
    assert_eq!(keyrings.len(), 1);
    assert_eq!(keyrings[0], target_keyring);
}
// Linking a found key into a destination keyring displaces an existing key
// of the same type and description. The pre-existing key deliberately gets
// a *different* payload so the replacement (not a payload update) is what
// is actually exercised.
#[test]
fn search_and_find_key_link_replace() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_key_link_replace")
        .unwrap();
    let mut destination_keyring = keyring
        .add_keyring("search_and_find_key_link_replace_destination")
        .unwrap();
    let description = "search_and_find_key_link_replace_key";
    let payload = &b"payload"[..];
    let key = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    // A distinct payload guarantees orig_key is a genuinely different key.
    let other_payload = &b"other_payload"[..];
    let orig_key = destination_keyring
        .add_key::<User, _, _>(description, other_payload)
        .unwrap();
    assert_ne!(key, orig_key);
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert_eq!(keys.len(), 1);
    assert_eq!(keys[0], orig_key);
    assert!(keyrings.is_empty());
    let found_key = keyring
        .search_for_key::<User, _, _>(description, &mut destination_keyring)
        .unwrap();
    assert_eq!(found_key, key);
    let actual_payload = key.read().unwrap();
    assert_eq!(payload, actual_payload.as_slice());
    // The original key should have been replaced.
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert_eq!(keys.len(), 1);
    assert_eq!(keys[0], key);
    assert!(keyrings.is_empty());
}
// Linking a found *key* into the destination must not displace an existing
// *keyring* with the same description: replacement is per (type,
// description), and the types differ here.
#[test]
fn search_and_find_key_link_replace_keyring() {
    let mut keyring = utils::new_test_keyring();
    let mut new_keyring = keyring
        .add_keyring("search_and_find_key_link_replace_keyring")
        .unwrap();
    let mut destination_keyring = keyring
        .add_keyring("search_and_find_key_link_replace_keyring_destination")
        .unwrap();
    let description = "search_and_find_key_link_replace_keyring_key";
    let payload = &b"payload"[..];
    let key = new_keyring
        .add_key::<User, _, _>(description, payload)
        .unwrap();
    // Same description, but a keyring rather than a user key.
    let orig_keyring = destination_keyring.add_keyring(description).unwrap();
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert!(keys.is_empty());
    assert_eq!(keyrings.len(), 1);
    assert_eq!(keyrings[0], orig_keyring);
    let found_key = keyring
        .search_for_key::<User, _, _>(description, &mut destination_keyring)
        .unwrap();
    assert_eq!(found_key, key);
    let actual_payload = key.read().unwrap();
    assert_eq!(payload, actual_payload.as_slice());
    // The original keyring should not have been replaced.
    let (keys, keyrings) = destination_keyring.read().unwrap();
    assert_eq!(keys.len(), 1);
    assert_eq!(keys[0], key);
    assert_eq!(keyrings.len(), 1);
    assert_eq!(keyrings[0], orig_keyring);
}
#[test]<|fim▁hole|> let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_keyring_link_replace")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_keyring_link_replace_destination")
.unwrap();
let description = "search_and_find_keyring_link_replace_keyring";
let target_keyring = new_keyring.add_keyring(description).unwrap();
let orig_keyring = destination_keyring.add_keyring(description).unwrap();
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert_eq!(keyrings.len(), 1);
assert_eq!(keyrings[0], orig_keyring);
let found_keyring = keyring
.search_for_keyring(description, &mut destination_keyring)
.unwrap();
assert_eq!(found_keyring, target_keyring);
// The original keyring should have been replaced.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert_eq!(keyrings.len(), 1);
assert_eq!(keyrings[0], target_keyring);
}
#[test]
fn search_and_find_keyring_link_replace_key() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_keyring_link_replace_key")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_keyring_link_replace_key_destination")
.unwrap();
let description = "search_and_find_keyring_link_replace_key_keyring";
let target_keyring = new_keyring.add_keyring(description).unwrap();
let payload = &b"payload"[..];
let orig_key = destination_keyring
.add_key::<User, _, _>(description, payload)
.unwrap();
let (keys, keyrings) = destination_keyring.read().unwrap();
assert_eq!(keys.len(), 1);
assert_eq!(keys[0], orig_key);
assert!(keyrings.is_empty());
let found_keyring = keyring
.search_for_keyring(description, &mut destination_keyring)
.unwrap();
assert_eq!(found_keyring, target_keyring);
// The original keyring should not have been replaced.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert_eq!(keys.len(), 1);
assert_eq!(keys[0], orig_key);
assert_eq!(keyrings.len(), 1);
assert_eq!(keyrings[0], target_keyring);
}
#[test]
fn search_and_find_key_no_link_perm_no_dest() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_key_no_link_perm_no_dest")
.unwrap();
let description = "search_and_find_key_no_link_perm_no_dest_key";
let payload = &b"payload"[..];
let mut key = new_keyring
.add_key::<User, _, _>(description, payload)
.unwrap();
let perms = {
let mut orig_perms = key.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_LINK);
orig_perms.remove(Permission::USER_LINK);
orig_perms.remove(Permission::GROUP_LINK);
orig_perms.remove(Permission::OTHER_LINK);
orig_perms
};
key.set_permissions(perms).unwrap();
let found_key = keyring
.search_for_key::<User, _, _>(description, None)
.unwrap();
assert_eq!(found_key, key);
let actual_payload = key.read().unwrap();
assert_eq!(payload, actual_payload.as_slice());
}
#[test]
fn search_and_find_keyring_no_link_perm_no_dest() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_keyring_no_link_perm_no_dest")
.unwrap();
let description = "search_and_find_keyring_no_link_perm_no_dest_keyring";
let mut target_keyring = new_keyring.add_keyring(description).unwrap();
let perms = {
let mut orig_perms = target_keyring.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_LINK);
orig_perms.remove(Permission::USER_LINK);
orig_perms.remove(Permission::GROUP_LINK);
orig_perms.remove(Permission::OTHER_LINK);
orig_perms
};
target_keyring.set_permissions(perms).unwrap();
let found_keyring = keyring.search_for_keyring(description, None).unwrap();
assert_eq!(found_keyring, target_keyring);
}
#[test]
fn search_and_find_key_no_link_perm() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_key_no_link_perm")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_key_no_link_perm_destination")
.unwrap();
let description = "search_and_find_key_no_link_perm_key";
let payload = &b"payload"[..];
let mut key = new_keyring
.add_key::<User, _, _>(description, payload)
.unwrap();
let perms = {
let mut orig_perms = key.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_LINK);
orig_perms.remove(Permission::USER_LINK);
orig_perms.remove(Permission::GROUP_LINK);
orig_perms.remove(Permission::OTHER_LINK);
orig_perms
};
key.set_permissions(perms).unwrap();
let err = keyring
.search_for_key::<User, _, _>(description, &mut destination_keyring)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EACCES));
// Assert that it was not linked to the destination keyring.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert!(keyrings.is_empty());
}
#[test]
fn search_and_find_keyring_no_link_perm() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_keyring_no_link_perm")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_keyring_no_link_perm_destination")
.unwrap();
let description = "search_and_find_keyring_no_link_perm_keyring";
let mut target_keyring = new_keyring.add_keyring(description).unwrap();
let perms = {
let mut orig_perms = target_keyring.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_LINK);
orig_perms.remove(Permission::USER_LINK);
orig_perms.remove(Permission::GROUP_LINK);
orig_perms.remove(Permission::OTHER_LINK);
orig_perms
};
target_keyring.set_permissions(perms).unwrap();
let err = keyring
.search_for_keyring(description, &mut destination_keyring)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EACCES));
// Assert that it was not linked to the destination keyring.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert!(keyrings.is_empty());
}
#[test]
fn search_and_find_key_no_write_perm() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_key_no_write_perm")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_key_no_write_perm_destination")
.unwrap();
let description = "search_and_find_key_no_write_perm_key";
let payload = &b"payload"[..];
let _ = new_keyring
.add_key::<User, _, _>(description, payload)
.unwrap();
let perms = {
let mut orig_perms = destination_keyring.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_WRITE);
orig_perms.remove(Permission::USER_WRITE);
orig_perms.remove(Permission::GROUP_WRITE);
orig_perms.remove(Permission::OTHER_WRITE);
orig_perms
};
destination_keyring.set_permissions(perms).unwrap();
let err = keyring
.search_for_key::<User, _, _>(description, &mut destination_keyring)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EACCES));
// Assert that it was not linked to the destination keyring.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert!(keyrings.is_empty());
}
#[test]
fn search_and_find_keyring_no_write_perm() {
let mut keyring = utils::new_test_keyring();
let mut new_keyring = keyring
.add_keyring("search_and_find_keyring_no_write_perm")
.unwrap();
let mut destination_keyring = keyring
.add_keyring("search_and_find_keyring_no_write_perm_destination")
.unwrap();
let description = "search_and_find_keyring_no_write_perm_keyring";
let _ = new_keyring.add_keyring(description).unwrap();
let perms = {
let mut orig_perms = destination_keyring.description().unwrap().perms;
orig_perms.remove(Permission::POSSESSOR_WRITE);
orig_perms.remove(Permission::USER_WRITE);
orig_perms.remove(Permission::GROUP_WRITE);
orig_perms.remove(Permission::OTHER_WRITE);
orig_perms
};
destination_keyring.set_permissions(perms).unwrap();
let err = keyring
.search_for_keyring(description, &mut destination_keyring)
.unwrap_err();
assert_eq!(err, errno::Errno(libc::EACCES));
// Assert that it was not linked to the destination keyring.
let (keys, keyrings) = destination_keyring.read().unwrap();
assert!(keys.is_empty());
assert!(keyrings.is_empty());
}<|fim▁end|> | fn search_and_find_keyring_link_replace() { |
<|file_name|>frequency_status.rs<|end_file_name|><|fim▁begin|>use rosrust::Duration;
use rosrust_diagnostics::{FrequencyStatus, Level, Status, Task};
mod util;
#[test]
fn frequency_status_test() {
let _roscore = util::run_roscore_for(util::Feature::FrequencyStatusTest);
rosrust::init("frequency_status_test");
let fs = FrequencyStatus::builder()
.window_size(2)
.min_frequency(10.0)
.max_frequency(20.0)
.tolerance(0.5)
.build();
fs.tick();
rosrust::sleep(Duration::from_nanos(20_000_000));<|fim▁hole|> rosrust::sleep(Duration::from_nanos(50_000_000));
fs.tick();
let mut status1 = Status::default();
fs.run(&mut status1);
rosrust::sleep(Duration::from_nanos(300_000_000));
fs.tick();
let mut status2 = Status::default();
fs.run(&mut status2);
rosrust::sleep(Duration::from_nanos(150_000_000));
fs.tick();
let mut status3 = Status::default();
fs.run(&mut status3);
fs.clear();
let mut status4 = Status::default();
fs.run(&mut status4);
assert_eq!(
status0.level,
Level::Warn,
"Max frequency exceeded but not reported"
);
assert_eq!(
status1.level,
Level::Ok,
"Within max frequency but reported error"
);
assert_eq!(
status2.level,
Level::Ok,
"Within min frequency but reported error"
);
assert_eq!(
status3.level,
Level::Warn,
"Min frequency exceeded but not reported"
);
assert_eq!(status4.level, Level::Error, "Freshly cleared should fail");
assert_eq!(
status0.name, "",
"Name should not be set by FrequencyStatus"
);
assert_eq!(
fs.name(),
"Frequency Status",
"Name should be \"Frequency Status\""
);
}<|fim▁end|> | let mut status0 = Status::default();
fs.run(&mut status0); |
<|file_name|>TaskManagerPlugin.py<|end_file_name|><|fim▁begin|>""" Container for TaskManager plug-ins, to handle the destination of the tasks
"""
import six
from DIRAC import gLogger
from DIRAC.Core.Utilities.List import fromChar
from DIRAC.Core.Utilities.SiteSEMapping import getSitesForSE
from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getSites
from DIRAC.TransformationSystem.Client.PluginBase import PluginBase
class TaskManagerPlugin(PluginBase):
"""A TaskManagerPlugin object should be instantiated by every TaskManager object.
self.params here could be
{'Status': 'Created', 'TargetSE': 'Unknown', 'TransformationID': 1086L, 'RunNumber': 0L,
'Site': 'DIRAC.Test.ch', 'TaskID': 21L, 'InputData': '', 'JobType': 'MCSimulation'}
which corresponds to paramsDict in TaskManager (which is in fact a tasks dict)
"""
def _BySE(self):
"""Matches using TargetSE. This is the standard plugin."""
destSites = set()
try:
seList = ["Unknown"]
if self.params["TargetSE"]:
if isinstance(self.params["TargetSE"], six.string_types):
seList = fromChar(self.params["TargetSE"])
elif isinstance(self.params["TargetSE"], list):
seList = self.params["TargetSE"]
except KeyError:
pass
if not seList or seList == ["Unknown"]:
return destSites
for se in seList:
res = getSitesForSE(se)
if not res["OK"]:
gLogger.warn("Could not get Sites associated to SE", res["Message"])
else:
thisSESites = res["Value"]
if thisSESites:
# We make an OR of the possible sites
destSites.update(thisSESites)
gLogger.debug("Destinations: %s" % ",".join(destSites))
return destSites
def _ByJobType(self):
"""By default, all sites are allowed to do every job.
The actual rules are freely specified in the Operation JobTypeMapping section.
The content of the section may look like this:
User
{
Exclude = PAK
Exclude += Ferrara
Exclude += Bologna<|fim▁hole|> Exclude += CERN
Exclude += IN2P3
Allow
{
Paris = IN2P3
CERN = CERN
IN2P3 = IN2P3
}
}
DataReconstruction
{
Exclude = PAK
Exclude += Ferrara
Exclude += CERN
Exclude += IN2P3
Allow
{
Ferrara = CERN
CERN = CERN
IN2P3 = IN2P3
IN2P3 += CERN
}
}
Merge
{
Exclude = ALL
Allow
{
CERN = CERN
IN2P3 = IN2P3
}
}
The sites in the exclusion list will be removed.
The allow section says where each site may help another site
"""
# 1. get sites list
res = getSites()
if not res["OK"]:
gLogger.error("Could not get the list of sites", res["Message"])
return res
destSites = set(res["Value"])
# 2. get JobTypeMapping "Exclude" value (and add autoAddedSites)
gLogger.debug("Getting JobTypeMapping 'Exclude' value (and add autoAddedSites)")
jobType = self.params["JobType"]
if not jobType:
raise RuntimeError("No jobType specified")
excludedSites = set(self.opsH.getValue("JobTypeMapping/%s/Exclude" % jobType, []))
gLogger.debug("Explicitly excluded sites for %s task: %s" % (jobType, ",".join(excludedSites)))
autoAddedSites = self.opsH.getValue("JobTypeMapping/AutoAddedSites", [])
if "WithStorage" in autoAddedSites:
# Add all sites with storage, such that jobs can run wherever data is
autoAddedSites.remove("WithStorage")
autoAddedSites += DMSHelpers().getTiers(withStorage=True)
# 3. removing sites in Exclude
if not excludedSites:
pass
elif "ALL" in excludedSites:
destSites = set()
else:
destSites -= excludedSites
# 4. get JobTypeMapping "Allow" section
res = self.opsH.getOptionsDict("JobTypeMapping/%s/Allow" % jobType)
if not res["OK"]:
gLogger.debug(res["Message"])
allowed = {}
else:
allowed = dict((site, set(fromChar(fromSites))) for site, fromSites in res["Value"].items())
autoAddedSites = set(self.opsH.getValue("JobTypeMapping/%s/AutoAddedSites" % jobType, autoAddedSites))
gLogger.debug("Auto-added sites for %s task: %s" % (jobType, ",".join(autoAddedSites)))
# 5. add autoAddedSites, if requested
for autoAddedSite in autoAddedSites:
allowed.setdefault(autoAddedSite, set()).add(autoAddedSite)
gLogger.debug("Allowed sites for %s task: %s" % (jobType, ",".join(allowed)))
# 6. Allowing sites that should be allowed
taskSiteDestination = self._BySE()
for destSite, fromSites in allowed.items():
for fromSite in fromSites:
if not taskSiteDestination or fromSite in taskSiteDestination:
destSites.add(destSite)
gLogger.debug(
"Computed list of destination sites for %s task with TargetSE %s: %s"
% (jobType, self.params["TargetSE"], ",".join(destSites))
)
return destSites<|fim▁end|> | Exclude += Paris |
<|file_name|>bench.rs<|end_file_name|><|fim▁begin|>#![feature(test)]
extern crate "basic-hll" as hll;
extern crate test;
#[cfg(test)]
mod tests {
use hll::*;
use test::Bencher;
#[bench]
fn bench_string_addition(b: &mut Bencher) {
b.iter(|| {
let mut hll = HLL::ctor(0.0040625);
let upper = 1000;
let mut counter = 0;
loop {<|fim▁hole|> if counter > upper {
break;
}
counter = counter + 1;
hll.insert(&counter);
}
hll.count();
});
}
}<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(feature = "no_std", no_std)]
#[macro_use]
extern crate cfg_if;
cfg_if! (
if #[cfg(feature="no_std")] {
use core::str::FromStr;
} else {
use std::str::FromStr;
}
);
mod char;
mod int;
pub use char::TryFromIntToCharError;
pub use int::TryFromIntError;
/// Attempt a conversion from `T` into `Self`, returning `Err` on failure.
///
/// Mirror of the (then-unstable) standard-library `TryFrom` trait.
pub trait TryFrom<T>: Sized {
    /// Error produced when the conversion fails.
    type Err;
    // The parameter was previously anonymous (`fn try_from(T)`), which is
    // 2015-edition-only syntax and a hard error on edition 2018+; naming it
    // is fully backward compatible for implementors.
    fn try_from(value: T) -> Result<Self, Self::Err>;
}
/// Attempt a conversion from `Self` into `T`; the reciprocal of `TryFrom`.
pub trait TryInto<T>: Sized {
    /// Error produced when the conversion fails.
    type Err;
    fn try_into(self) -> Result<T, Self::Err>;
}
impl<T, U> TryInto<U> for T
where
U: TryFrom<T>,
{
type Err = U::Err;
fn try_into(self) -> Result<U, U::Err> {
U::try_from(self)
}
}
// Anything parseable via `FromStr` is convertible from `&str` through this
// crate's `TryFrom` as well.
impl<'a, T> TryFrom<&'a str> for T
where
    T: FromStr,
{
    type Err = T::Err;

    fn try_from(input: &'a str) -> Result<Self, Self::Err> {
        // `str::parse` is exactly `FromStr::from_str` behind the scenes.
        input.parse::<T>()
    }
}
<|fim▁hole|>#[cfg(test)]
mod tests {
    use super::*;
    // The `FromStr` blanket impl should make `u32::try_from(&str)` work.
    #[test]
    fn should_have_try_from_impl_for_from_str() {
        let result = u32::try_from("3");
        assert_eq!(result.unwrap(), 3)
    }
    // Parse failures must surface the underlying `FromStr` error.
    #[test]
    fn should_have_try_from_impl_for_from_str_that_handles_err() {
        let result = u32::try_from("hello");
        assert_eq!(
            format!("{}", result.unwrap_err()),
            "invalid digit found in string"
        )
    }
    // The reciprocal `TryInto` blanket impl should also be usable.
    #[test]
    fn should_have_try_into_impl_for_from_str() {
        let result: Result<u32, _> = "3".try_into();
        assert_eq!(result.unwrap(), 3)
    }
}
/// Error type used when conversion is infallible.
///
/// This enum has no variants, so a value of it can never be constructed;
/// a `Result<_, Void>` is therefore statically known to be `Ok`.
/// The never type (`!`) will replace this when it is available in stable Rust.
#[derive(Debug, Eq, PartialEq)]
pub enum Void {}
<|file_name|>EmbedBuilder.java<|end_file_name|><|fim▁begin|>package discord.jar;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
public class EmbedBuilder {
    // Embed content accumulated by the with*/appendField methods; every
    // component is optional and remains null until explicitly set.
    private String title;
    private String type;
    private String description;
    private String url;
    private Color color;
    private Embed.EmbedFooter footer;
    private Embed.EmbedImage image;
    private Embed.EmbedImage thumbnail;
    private Embed.EmbedMedia video;
    private Embed.EmbedProvider provider;
    private Embed.EmbedAuthor author;
    // Fields start as an empty list so build() never sees null here.
    private List<Embed.EmbedField> fields = new ArrayList<>();
    /**
     * Sets the embed's title text.
     *
     * @param title the title to show at the top of the embed
     * @return this builder, for chaining
     */
    public EmbedBuilder withTitle(String title) {
        this.title = title;
        return this;
    }
    /**
     * Sets the embed's type string.
     *
     * @param type the embed type
     * @return this builder, for chaining
     */
    public EmbedBuilder withType(String type) {
        this.type = type;
        return this;
    }
    /**
     * Sets the embed's body text.
     *
     * @param description the description text
     * @return this builder, for chaining
     */
    public EmbedBuilder withDescription(String description) {
        this.description = description;
        return this;
    }
    /**
     * Sets the URL the embed's title links to.
     *
     * @param url the target URL
     * @return this builder, for chaining
     */
    public EmbedBuilder withUrl(String url) {
        this.url = url;
        return this;
    }
    /**
     * Sets the accent color of the embed.
     *
     * @param color the color to use
     * @return this builder, for chaining
     */
    public EmbedBuilder withColor(Color color) {
        this.color = color;
        return this;
    }
    /**
     * Sets the footer from a prebuilt footer object.
     *
     * @param footer the footer to attach
     * @return this builder, for chaining
     */
    public EmbedBuilder withFooter(Embed.EmbedFooter footer) {
        this.footer = footer;
        return this;
    }
    /**
     * Convenience overload: builds the footer from its text and icon URL.
     *
     * @param text the footer text
     * @param iconUrl the footer icon URL (may be null)
     * @return this builder, for chaining
     */
    public EmbedBuilder withFooter(String text, String iconUrl) {
        this.footer = new Embed.EmbedFooter(text, iconUrl, null);
        return this;
    }
    /**
     * Sets the main image from a prebuilt image object.
     *
     * @param image the image to attach
     * @return this builder, for chaining
     */
    public EmbedBuilder withImage(Embed.EmbedImage image) {
        this.image = image;
        return this;
    }
    /**
     * Convenience overload: builds the main image from a URL only
     * (dimensions left unspecified as -1).
     *
     * @param url the image URL
     * @return this builder, for chaining
     */
    public EmbedBuilder withImage(String url) {
        this.image = new Embed.EmbedImage(url, null, -1, -1);
        return this;
    }
public EmbedBuilder withThumbnail(Embed.EmbedImage thumbnail) {<|fim▁hole|> public EmbedBuilder withThumbnail(String url) {
this.thumbnail = new Embed.EmbedImage(url, null, -1, -1);
return this;
}
public EmbedBuilder withVideo(Embed.EmbedMedia video) {
this.video = video;
return this;
}
public EmbedBuilder withVideo(String url) {
this.video = new Embed.EmbedMedia(url, -1, -1);
return this;
}
public EmbedBuilder withProvider(Embed.EmbedProvider provider) {
this.provider = provider;
return this;
}
public EmbedBuilder withProvider(String name, String url) {
this.provider = new Embed.EmbedProvider(name, url);
return this;
}
public EmbedBuilder withAuthor(Embed.EmbedAuthor author) {
this.author = author;
return this;
}
public EmbedBuilder withAuthor(String name, String url, String iconUrl) {
this.author = new Embed.EmbedAuthor(name, url, iconUrl, null);
return this;
}
public EmbedBuilder appendField(Embed.EmbedField field) {
this.fields.add(field);
return this;
}
public EmbedBuilder appendField(String name, String value, boolean inline) {
this.fields.add(new Embed.EmbedField(name, value, inline));
return this;
}
public Embed build() {
return new Embed(title, type, description, url, color, footer, image, thumbnail, video, provider, author, fields.toArray(new Embed.EmbedField[0]));
}
}<|fim▁end|> | this.thumbnail = thumbnail;
return this;
}
|
<|file_name|>httpsink.go<|end_file_name|><|fim▁begin|>package sfxclient
import (
"bytes"
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"runtime"
"strings"
"sync/atomic"
"time"
"unicode"
"github.com/golang/protobuf/proto"
"github.com/signalfx/com_signalfx_metrics_protobuf"
"github.com/signalfx/golib/datapoint"
"github.com/signalfx/golib/errors"
"golang.org/x/net/context"
)<|fim▁hole|>// ClientVersion is the version of this library and is embedded into the user agent
const ClientVersion = "1.0"
// IngestEndpointV2 is the v2 version of the signalfx ingest endpoint
const IngestEndpointV2 = "https://ingest.signalfx.com/v2/datapoint"
// DefaultUserAgent is the UserAgent string sent to signalfx
var DefaultUserAgent = fmt.Sprintf("golib-sfxclient/%s (gover %s)", ClientVersion, runtime.Version())
// DefaultTimeout is the default time to fail signalfx datapoint requests if they don't succeed
const DefaultTimeout = time.Second * 5
// HTTPDatapointSink will accept signalfx datapoints and forward them to signalfx via HTTP
type HTTPDatapointSink struct {
AuthToken string
UserAgent string
Endpoint string
Client http.Client
protoMarshaler func(pb proto.Message) ([]byte, error)
stats struct {
readingBody int64
}
}
var _ Sink = &HTTPDatapointSink{}
// TokenHeaderName is the header key for the auth token in the HTTP request
const TokenHeaderName = "X-Sf-Token"
// NewHTTPDatapointSink creates a default NewHTTPDatapointSink using package level constants
func NewHTTPDatapointSink() *HTTPDatapointSink {
return &HTTPDatapointSink{
UserAgent: DefaultUserAgent,
Endpoint: IngestEndpointV2,
Client: http.Client{
Timeout: DefaultTimeout,
Transport: http.DefaultTransport,
},
protoMarshaler: proto.Marshal,
}
}
// AddDatapoints forwards the datapoints to signalfx
func (h *HTTPDatapointSink) AddDatapoints(ctx context.Context, points []*datapoint.Datapoint) (err error) {
if len(points) == 0 {
return nil
}
if ctx.Err() != nil {
return errors.Annotate(ctx.Err(), "context already closed")
}
body, err := h.encodePostBodyProtobufV2(points)
if err != nil {
return errors.Annotate(err, "cannot encode datapoints into protocol buffers")
}
req, err := http.NewRequest("POST", h.Endpoint, bytes.NewBuffer(body))
if err != nil {
return errors.Annotatef(err, "cannot parse new HTTP request to %s", h.Endpoint)
}
req.Header.Set("Content-Type", "application/x-protobuf")
req.Header.Set(TokenHeaderName, h.AuthToken)
req.Header.Set("User-Agent", h.UserAgent)
req.Header.Set("Connection", "Keep-Alive")
return h.withCancel(ctx, req)
}
func (h *HTTPDatapointSink) handleResponse(resp *http.Response, respErr error) (err error) {
if respErr != nil {
return errors.Annotatef(respErr, "failed to send/recieve http request")
}
defer func() {
closeErr := errors.Annotate(resp.Body.Close(), "failed to close response body")
err = errors.NewMultiErr([]error{err, closeErr})
}()
atomic.AddInt64(&h.stats.readingBody, 1)
respBody, err := ioutil.ReadAll(resp.Body)
if err != nil {
return errors.Annotate(err, "cannot fully read response body")
}
if resp.StatusCode != http.StatusOK {
return errors.Errorf("invalid status code %d", resp.StatusCode)
}
var bodyStr string
err = json.Unmarshal(respBody, &bodyStr)
if err != nil {
return errors.Annotatef(err, "cannot unmarshal response body %s", respBody)
}
if bodyStr != "OK" {
return errors.Errorf("invalid response body %s", bodyStr)
}
return nil
}
var toMTMap = map[datapoint.MetricType]com_signalfx_metrics_protobuf.MetricType{
datapoint.Counter: com_signalfx_metrics_protobuf.MetricType_CUMULATIVE_COUNTER,
datapoint.Count: com_signalfx_metrics_protobuf.MetricType_COUNTER,
datapoint.Enum: com_signalfx_metrics_protobuf.MetricType_GAUGE,
datapoint.Gauge: com_signalfx_metrics_protobuf.MetricType_GAUGE,
datapoint.Rate: com_signalfx_metrics_protobuf.MetricType_GAUGE,
datapoint.Timestamp: com_signalfx_metrics_protobuf.MetricType_GAUGE,
}
func toMT(mt datapoint.MetricType) com_signalfx_metrics_protobuf.MetricType {
ret, exists := toMTMap[mt]
if exists {
return ret
}
panic(fmt.Sprintf("Unknown metric type: %d\n", mt))
}
func datumForPoint(pv datapoint.Value) *com_signalfx_metrics_protobuf.Datum {
switch t := pv.(type) {
case datapoint.IntValue:
x := t.Int()
return &com_signalfx_metrics_protobuf.Datum{IntValue: &x}
case datapoint.FloatValue:
x := t.Float()
return &com_signalfx_metrics_protobuf.Datum{DoubleValue: &x}
default:
x := t.String()
return &com_signalfx_metrics_protobuf.Datum{StrValue: &x}
}
}
func mapToDimensions(dimensions map[string]string) []*com_signalfx_metrics_protobuf.Dimension {
ret := make([]*com_signalfx_metrics_protobuf.Dimension, 0, len(dimensions))
for k, v := range dimensions {
if k == "" || v == "" {
continue
}
// If someone knows a better way to do this, let me know. I can't just take the &
// of k and v because their content changes as the range iterates
copyOfK := filterSignalfxKey(string([]byte(k)))
copyOfV := (string([]byte(v)))
ret = append(ret, (&com_signalfx_metrics_protobuf.Dimension{
Key: ©OfK,
Value: ©OfV,
}))
}
return ret
}
func filterSignalfxKey(str string) string {
return strings.Map(runeFilterMap, str)
}
func runeFilterMap(r rune) rune {
if unicode.IsDigit(r) || unicode.IsLetter(r) || r == '_' {
return r
}
return '_'
}
func (h *HTTPDatapointSink) coreDatapointToProtobuf(point *datapoint.Datapoint) *com_signalfx_metrics_protobuf.DataPoint {
m := point.Metric
var ts int64
if point.Timestamp.IsZero() {
ts = 0
} else {
ts = point.Timestamp.UnixNano() / time.Millisecond.Nanoseconds()
}
mt := toMT(point.MetricType)
v := &com_signalfx_metrics_protobuf.DataPoint{
Metric: &m,
Timestamp: &ts,
Value: datumForPoint(point.Value),
MetricType: &mt,
Dimensions: mapToDimensions(point.Dimensions),
}
return v
}
func (h *HTTPDatapointSink) encodePostBodyProtobufV2(datapoints []*datapoint.Datapoint) ([]byte, error) {
dps := make([]*com_signalfx_metrics_protobuf.DataPoint, 0, len(datapoints))
for _, dp := range datapoints {
dps = append(dps, h.coreDatapointToProtobuf(dp))
}
msg := &com_signalfx_metrics_protobuf.DataPointUploadMessage{
Datapoints: dps,
}
body, err := h.protoMarshaler(msg)
if err != nil {
return nil, errors.Annotate(err, "protobuf marshal failed")
}
return body, nil
}<|fim▁end|> | |
<|file_name|>layout.js<|end_file_name|><|fim▁begin|>'use strict';
const path = require('path');
const
srcDir = path.join(__dirname, 'src/front'),
distDir = path.join(__dirname, 'front/dist');
module.exports = {<|fim▁hole|> jsEntry: path.join(srcDir, 'js/index.js'),
htmlEntry: path.join(srcDir, 'index.html'),
stylesEntry: path.join(srcDir, 'scss/style.scss')
}
},
dist: {
buildDir: path.join(distDir, 'build'),
releaseDir: path.join(distDir, 'release')
}
};<|fim▁end|> | envFilePath: path.join(__dirname, '.env'),
src: {
front: { |
<|file_name|>observer_test.go<|end_file_name|><|fim▁begin|>/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package oom
import (
"testing"
"time"
"github.com/stretchr/testify/assert"
"k8s.io/api/core/v1"
"k8s.io/kubernetes/pkg/api/legacyscheme"
_ "k8s.io/kubernetes/pkg/apis/core/install" //to decode yaml
_ "k8s.io/kubernetes/pkg/apis/extensions/install" //to decode yaml
)
const pod1Yaml = `<|fim▁hole|>apiVersion: v1
kind: Pod
metadata:
name: Pod1
namespace: mockNamespace
spec:
containers:
- name: Name11
resources:
requests:
memory: "1024"
status:
containerStatuses:
- name: Name11
restartCount: 0
`
const pod2Yaml = `
apiVersion: v1
kind: Pod
metadata:
name: Pod1
namespace: mockNamespace
spec:
containers:
- name: Name11
resources:
requests:
memory: "1024"
status:
containerStatuses:
- name: Name11
restartCount: 1
lastState:
terminated:
finishedAt: 2018-02-23T13:38:48Z
reason: OOMKilled
`
func newPod(yaml string) (*v1.Pod, error) {
decode := legacyscheme.Codecs.UniversalDeserializer().Decode
obj, _, err := decode([]byte(yaml), nil, nil)
if err != nil {
return nil, err
}
return obj.(*v1.Pod), nil
}
func TestOOMReceived(t *testing.T) {
p1, err := newPod(pod1Yaml)
assert.NoError(t, err)
p2, err := newPod(pod2Yaml)
assert.NoError(t, err)
observer := NewObserver()
go observer.OnUpdate(p1, p2)
info := <-observer.ObservedOomsChannel
assert.Equal(t, "mockNamespace", info.Namespace)
assert.Equal(t, "Pod1", info.Pod)
assert.Equal(t, "Name11", info.Container)
assert.Equal(t, int64(1024), info.MemoryRequest.Value())
timestamp, err := time.Parse(time.RFC3339, "2018-02-23T13:38:48Z")
assert.NoError(t, err)
assert.Equal(t, timestamp.Unix(), info.Timestamp.Unix())
}<|fim▁end|> | |
<|file_name|>test_ml.py<|end_file_name|><|fim▁begin|>"""
Predict labels using trained ML models. Use average probability ensemble.
"""
__author__ = 'bshang'
import numpy as np
import pandas as pd
from sklearn import preprocessing
from sklearn.externals import joblib<|fim▁hole|>
def convert_label_to_array(str_label):
str_label = str_label.split(' ')
return [int(x) for x in str_label if len(x) > 0]
MODEL = 'inception-v3'
LAYER = 'global_pool_output'
NUM_EPOCH = 30
BIZ_FEATURES_PATH = '/data/test_biz_features_{0}_{1}_{2}.h5'.format(MODEL, LAYER, NUM_EPOCH)
df = pd.read_csv(BIZ_FEATURES_PATH, header=0)
cols = ["F" + str(i+1) for i in range(0, 2048)]
X = df[cols].values
model_svc = joblib.load('/data/skmodels/svc_inception-v3.pkl')
model_lrc = joblib.load('/data/skmodels/lrc_inception-v3.pkl')
model_rfc = joblib.load('/data/skmodels/rfc_inception-v3.pkl')
print('predict svc')
y_predict_proba_svc = model_svc.predict_proba(X)
print('predict lrc')
y_predict_proba_lrc = model_lrc.predict_proba(X)
print('predict rfc')
y_predict_proba_rfc = model_rfc.predict_proba(X)
y_predict_proba = np.mean(
np.array([y_predict_proba_svc, y_predict_proba_lrc, y_predict_proba_rfc]), axis=0)
THRESHOLD = 0.46 # estimated from cross-validation
y_predict = preprocessing.binarize(y_predict_proba, threshold=THRESHOLD)
# convert the binary labels back to numbered labels
df_biz2lab = pd.read_csv('/data/train.csv').dropna()
y = np.array([convert_label_to_array(y) for y in df_biz2lab['labels']])
mlb = preprocessing.MultiLabelBinarizer()
mlb.fit_transform(y)
y_ = mlb.inverse_transform(y_predict) # y_ contain the numbered labels
y_ = [' '.join(str(x) for x in ls) for ls in y_]
df['labels'] = pd.Series(y_, index=df.index)
df = df.sort_values('business_id')
with open('/data/submission/inception_v3_svc_rfc_lrc_epoch3.csv', 'w') as f:
df[['business_id', 'labels']].to_csv(f, index=False)<|fim▁end|> | |
<|file_name|>user.ts<|end_file_name|><|fim▁begin|>/// <reference path="../apimanPlugin.ts"/>
/// <reference path="../services.ts"/>
module Apiman {
export var UserRedirectController = _module.controller("Apiman.UserRedirectController",
['$q', '$scope', '$location', 'PageLifecycle', '$routeParams',
($q, $scope, $location, PageLifecycle, $routeParams) => {
PageLifecycle.loadPage('UserRedirect', undefined, undefined, $scope, function() {<|fim▁hole|>
}<|fim▁end|> | PageLifecycle.forwardTo('/users/{0}/orgs', $routeParams.user);
});
}]) |
<|file_name|>bit_distributor.rs<|end_file_name|><|fim▁begin|>use num::basic::integers::PrimitiveInt;
use num::logic::traits::{BitConvertible, NotAssign};
use std::fmt::Debug;
const COUNTER_WIDTH: usize = u64::WIDTH as usize;
/// This struct is used to configure `BitDistributor`s.
///
/// See the `BitDistributor` documentation for more.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub struct BitDistributorOutputType {
weight: usize, // 0 means a tiny output_type
max_bits: Option<usize>,
}
impl BitDistributorOutputType {
/// Creates a normal output with a specified weight.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Panics
/// Panics if `weight` is zero.
///
/// The corresponding element grows as a power of $i$. See the `BitDistributor` documentation
/// for more.
pub fn normal(weight: usize) -> BitDistributorOutputType {
assert_ne!(weight, 0);
BitDistributorOutputType {
weight,
max_bits: None,
}
}
/// Creates a tiny output.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// The corresponding element grows logarithmically. See the `BitDistributor` documentation for
/// more.
pub const fn tiny() -> BitDistributorOutputType {
BitDistributorOutputType {
weight: 0,
max_bits: None,
}
}
}
/// `BitDistributor` helps generate tuples exhaustively.
///
/// Think of `counter` as the bits of an integer. It's initialized to zero (all `false`s), and as
/// it's repeatedly incremented, it eventually takes on every 64-bit value.
///
/// `output_types` is a list of $n$ configuration structs that, together, specify how to generate an
/// n-element tuple of unsigned integers. Calling `get_output` repeatedly, passing in 0 through
/// $n - 1$ as `index`, distributes the bits of `counter` into a tuple.
///
/// This is best shown with an example. If `output_types` is set to
/// `[BitDistributorOutputType::normal(1); 2]`, the distributor will generate all pairs of unsigned
/// integers. A pair may be extracted by calling `get_output(0)` and `get_output(1)`; then `counter`
/// may be incremented to create the next pair. In this case, the pairs will be
/// $(0, 0), (0, 1), (1, 0), (1, 1), (0, 2), (0, 3), (1, 2), (1, 3), (2, 0), (2, 1), \ldots$.
///
/// If you think of these pairs as coordinates in the $xy$-plane, they are traversed along a Z-order
/// curve. Every pair of unsigned integers will be generated exactly once.
///
/// In general, setting `output_types` to `[BitDistributorOutputType::normal(1); n]` will generate
/// $n$-tuples. The elements of the tuples will be very roughly the same size, in the sense that
/// each element will grow as $O(\sqrt\[n\]{i})$, where $i$ is the counter. Sometimes we want the
/// elements to grow at different rates. To accomplish this, we can change the weights of the output
/// types. For example, if we set `output_types` to
/// `[BitDistributorOutputType::normal(1), BitDistributorOutputType::normal(2)]`, the first element
/// of the generated pairs will grow as $O(\sqrt\[3\]{i})$ and the second as $O(i^{2/3})$. In
/// general, if the weights are $w_0, w_1, \\ldots, w_{n-1}$, then the $k$th element of the output
/// tuples will grow as $O(i^{w_i/\sum_{j=0}^{n-1}w_j})$.
///
/// Apart from creating _normal_ output types with different weights, we can create _tiny_ output
/// types, which indicate that the corresponding tuple element should grow especially slowly. If
/// `output_types` contains $m$ tiny output types, each tiny tuple element grows as
/// $O(\sqrt\[m\]{\log i})$. The growth of the other elements is unaffected. Having only tiny types
/// in `output_types` is disallowed.
///
/// The above discussion of growth rates assumes that `max_bits` is not specified for any output
/// type. But if `max_bits` is set to $b$, then the corresponding element will start growing just as
/// if `max_bits` wasn't specified, but will stop growing once it reaches $2^b-1$.
#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub struct BitDistributor {
pub output_types: Vec<BitDistributorOutputType>,
bit_map: [usize; COUNTER_WIDTH],
counter: [bool; COUNTER_WIDTH],
}
impl BitDistributor {
fn new_without_init(output_types: &[BitDistributorOutputType]) -> BitDistributor {
if output_types
.iter()
.all(|output_type| output_type.weight == 0)
{
panic!("All output_types cannot be tiny");
}
BitDistributor {
output_types: output_types.to_vec(),
bit_map: [0; COUNTER_WIDTH],
counter: [false; COUNTER_WIDTH],
}
}
/// Creates a new `BitDistributor`.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(n)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `output_types.len()`.
///
/// # Examples
/// ```
/// use malachite_base::iterators::bit_distributor::{BitDistributor, BitDistributorOutputType};
///
/// BitDistributor::new(
/// &[BitDistributorOutputType::normal(2), BitDistributorOutputType::tiny()]
/// );
/// ```
pub fn new(output_types: &[BitDistributorOutputType]) -> BitDistributor {
let mut distributor = BitDistributor::new_without_init(output_types);
distributor.update_bit_map();
distributor
}
/// Returns a reference to the internal bit map as a slice.
///
/// The bit map determines which output gets each bit of the counter. For example, if the bit
/// map is $[0, 1, 0, 1, 0, 1, \ldots]$, then the first element of the output pair gets the bits
/// with indices $0, 2, 4, \ldots$ and the second element gets the bits with indices
/// $1, 3, 5, \ldots$.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Examples
/// ```
/// use malachite_base::iterators::bit_distributor::{BitDistributor, BitDistributorOutputType};
///
/// let bd = BitDistributor::new(&[
/// BitDistributorOutputType::normal(2),
/// BitDistributorOutputType::tiny(),
/// ]);
/// assert_eq!(
/// bd.bit_map_as_slice(),
/// &[
/// 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/// 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
/// 0, 0, 0, 0, 0, 0, 0, 1
/// ][..]
/// );
/// ```
pub fn bit_map_as_slice(&self) -> &[usize] {
self.bit_map.as_ref()
}
fn update_bit_map(&mut self) {
let (mut normal_output_type_indices, mut tiny_output_type_indices): (
Vec<usize>,
Vec<usize>,
) = (0..self.output_types.len()).partition(|&i| self.output_types[i].weight != 0);
let mut normal_output_types_bits_used = vec![0; normal_output_type_indices.len()];
let mut tiny_output_types_bits_used = vec![0; tiny_output_type_indices.len()];
let mut ni = normal_output_type_indices.len() - 1;
let mut ti = tiny_output_type_indices.len().saturating_sub(1);
let mut weight_counter = self.output_types[normal_output_type_indices[ni]].weight;
for i in 0..COUNTER_WIDTH {
let use_normal_output_type = !normal_output_type_indices.is_empty()
&& (tiny_output_type_indices.is_empty() || !usize::is_power_of_two(i + 1));
if use_normal_output_type {
self.bit_map[i] = normal_output_type_indices[ni];
let output_type = self.output_types[normal_output_type_indices[ni]];
normal_output_types_bits_used[ni] += 1;
weight_counter -= 1;
if output_type.max_bits == Some(normal_output_types_bits_used[ni]) {
normal_output_type_indices.remove(ni);
normal_output_types_bits_used.remove(ni);
if normal_output_type_indices.is_empty() {
continue;
}
weight_counter = 0;
}
if weight_counter == 0 {
if ni == 0 {
ni = normal_output_type_indices.len() - 1;
} else {
ni -= 1;
}
weight_counter = self.output_types[normal_output_type_indices[ni]].weight;
}
} else {
if tiny_output_type_indices.is_empty() {
self.bit_map[i] = usize::MAX;
continue;
}
self.bit_map[i] = tiny_output_type_indices[ti];
let output_type = self.output_types[tiny_output_type_indices[ti]];
tiny_output_types_bits_used[ti] += 1;
if output_type.max_bits == Some(tiny_output_types_bits_used[ti]) {
tiny_output_type_indices.remove(ti);
tiny_output_types_bits_used.remove(ti);<|fim▁hole|> if tiny_output_type_indices.is_empty() {
continue;
}
}
if ti == 0 {
ti = tiny_output_type_indices.len() - 1;
} else {
ti -= 1;
}
}
}
}
/// Sets the maximum bits for several outputs.
///
/// Given slice of output indices, sets the maximum bits for each of the outputs and rebuilds
/// the bit map.
///
/// # Worst-case complexity
/// $T(n) = O(n)$
///
/// $M(n) = O(1)$
///
/// where $T$ is time, $M$ is additional memory, and $n$ is `output_type_indices.len()`.
///
/// # Panics
/// Panics if `max_bits` is 0 or if any index is greater than or equal to
/// `self.output_types.len()`.
///
/// # Examples
/// ```
/// use malachite_base::iterators::bit_distributor::{BitDistributor, BitDistributorOutputType};
///
/// let mut bd = BitDistributor::new(&[BitDistributorOutputType::normal(2); 3]);
/// assert_eq!(
/// bd.bit_map_as_slice(),
/// &[
/// 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1,
/// 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 2,
/// 1, 1, 0, 0, 2, 2, 1, 1
/// ][..]
/// );
///
/// bd.set_max_bits(&[0, 2], 5);
/// assert_eq!(
/// bd.bit_map_as_slice(),
/// &[
/// 2, 2, 1, 1, 0, 0, 2, 2, 1, 1, 0, 0, 2, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
/// 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
/// 1, 1, 1, 1, 1, 1, 1, 1
/// ][..]
/// );
/// ```
pub fn set_max_bits(&mut self, output_type_indices: &[usize], max_bits: usize) {
assert_ne!(max_bits, 0);
for &index in output_type_indices {
self.output_types[index].max_bits = Some(max_bits);
}
self.update_bit_map();
}
/// Increments the counter in preparation for a new set of outputs.
///
/// If the counter is incremented $2^{64}$ times, it rolls back to 0.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Examples
/// ```
/// use malachite_base::iterators::bit_distributor::{BitDistributor, BitDistributorOutputType};
///
/// let mut bd = BitDistributor::new(&[BitDistributorOutputType::normal(1)]);
/// let mut outputs = Vec::new();
/// for _ in 0..20 {
/// outputs.push(bd.get_output(0));
/// bd.increment_counter();
/// }
/// assert_eq!(
/// outputs,
/// &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
/// );
/// ```
pub fn increment_counter(&mut self) {
for b in self.counter.iter_mut() {
b.not_assign();
if *b {
break;
}
}
}
/// Gets the output at a specified index.
///
/// # Worst-case complexity
/// Constant time and additional memory.
///
/// # Panics
/// Panics if `index` is greater than or equal to `self.output_types.len()`.
///
/// # Examples
/// ```
/// extern crate itertools;
///
/// use itertools::Itertools;
///
/// use malachite_base::iterators::bit_distributor::{BitDistributor, BitDistributorOutputType};
///
/// let mut bd = BitDistributor::new(&[BitDistributorOutputType::normal(1); 2]);
/// let mut outputs = Vec::new();
/// for _ in 0..10 {
/// outputs.push((0..2).map(|i| bd.get_output(i)).collect_vec());
/// bd.increment_counter();
/// }
/// let expected_outputs: &[&[usize]] = &[
/// &[0, 0], &[0, 1], &[1, 0], &[1, 1], &[0, 2], &[0, 3], &[1, 2], &[1, 3], &[2, 0], &[2, 1]
/// ];
/// assert_eq!(outputs, expected_outputs,);
/// ```
pub fn get_output(&self, index: usize) -> usize {
assert!(index < self.output_types.len());
usize::from_bits_asc(
self.bit_map
.iter()
.zip(self.counter.iter())
.filter_map(|(&m, &c)| if m == index { Some(c) } else { None }),
)
}
}<|fim▁end|> | |
<|file_name|>case207.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2004 SIPfoundry Inc.
# Licensed by SIPfoundry under the GPL license.
#
# Copyright (C) 2004 SIP Forum
# Licensed to SIPfoundry under a Contributor Agreement.
#
#
# This file is part of SIP Forum User Agent Basic Test Suite which
# belongs to the SIP Forum Test Framework.
#
# SIP Forum User Agent Basic Test Suite is free software; you can
# redistribute it and/or modify it under the terms of the GNU General
# Public License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# SIP Forum User Agent Basic Test Suite is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SIP Forum User Agent Basic Test Suite; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA
#
# $Id: case207.py,v 1.2 2004/05/02 18:57:35 lando Exp $
#
from TestCase import TestCase
import NetworkEventHandler as NEH
import Log
class case207 (TestCase):
def config(self):
self.name = "Case 207"
self.description = "Content length larger than message"
self.isClient = True
self.transport = "UDP"
def run(self):
self.neh = NEH.NetworkEventHandler(self.transport)
inv = self.createRequest("INVITE")
cl = inv.getParsedHeaderValue("Content-Length")
cl.length = 9999
inv.setHeaderValue("Content-Length", cl.create())
self.writeMessageToNetwork(self.neh, inv)
self.code = 0
while (self.code <= 200):
repl = self.readReplyFromNetwork(self.neh)
if (repl is not None) and (repl.code > self.code):
self.code = repl.code
elif repl is None:
self.code = 999
if repl is None:
self.addResult(TestCase.TC_FAILED, "missing reply on request")
self.neh.closeSock()
def onDefaultCode(self, message):
if message.code > self.code:
self.code = message.code
if message.code >= 200:
if message.getParsedHeaderValue("CSeq").method == "INVITE":
Log.logDebug("case207: sending ACK for >= 200 reply", 3)<|fim▁hole|> elif message.code == 200:
if message.transaction.canceled:
Log.logDebug("case207: received 200 for CANCEL", 3)
else:
Log.logDebug("case207: sending BYE for accepted INVITE", 3)
bye = self.createRequest("BYE", dia=message.transaction.dialog)
self.writeMessageToNetwork(self.neh, bye)
rep = self.readReplyFromNetwork(self.neh)
if rep is None:
self.addResult(TestCase.TC_ERROR, "missing response on BYE")
elif message.code != 487:
self.addResult(TestCase.TC_FAILED, "INVITE rejected, but not with 400")
else:
self.addResult(TestCase.TC_FAILED, "INVITE accepted, not rejected with 400")
can = self.createRequest("CANCEL", trans=message.transaction)
message.transaction.canceled = True
self.writeMessageToNetwork(self.neh, can)
canrepl = self.readReplyFromNetwork(self.neh)
if canrepl is None:
self.addResult(TestCase.TC_ERROR, "missing 200 on CANCEL")<|fim▁end|> | ack = self.createRequest("ACK", trans=message.transaction)
self.writeMessageToNetwork(self.neh, ack)
if message.code == 400:
self.addResult(TestCase.TC_PASSED, "INVITE rejected with 400") |
<|file_name|>test-treestate.py<|end_file_name|><|fim▁begin|># Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
from __future__ import absolute_import
import itertools
import os
import posixpath
import random
import tempfile
import unittest
import silenttestrunner
from bindings import treestate
from edenscm.mercurial import pycompat
from hghave import require
testtmp = os.getenv("TESTTMP") or tempfile.mkdtemp("test-treestate")
def randname():
length = random.randint(1, 4)
return "".join(random.sample("abcdef", 1)[0] for i in range(length))
def randpath(path=""):
# pop components from path
for i in range(1 + random.randrange(path.count("/") + 1)):
path = os.path.dirname(path)
# push new components to path
maxlevel = 4
for i in range(1 + random.randrange(max([1, maxlevel - path.count("/")]))):<|fim▁hole|> if not path:
path = randname()
return path
def genpaths():
"""generate random paths"""
path = ""
while True:
nextpath = randpath(path)
yield nextpath
path = nextpath
def genfiles():
"""generate random tuple of (path, bits, mode, size, mtime, copied)"""
pathgen = genpaths()
while True:
path = next(pathgen)
bits = 0
mode = random.randint(0, 0o777)
size = random.randint(0, 1 << 31)
mtime = random.randint(-1, 1 << 31)
copied = None
# bits (StateFlags)
for bit in [
treestate.EXIST_P1,
treestate.EXIST_P2,
treestate.EXIST_NEXT,
treestate.IGNORED,
treestate.NEED_CHECK,
]:
if random.randint(0, 1):
bits |= bit
if random.randint(0, 1):
bits |= treestate.COPIED
copied = next(pathgen)
yield (path, bits, mode, size, mtime, copied)
class testtreestate(unittest.TestCase):
def testempty(self):
tree = treestate.treestate(os.path.join(testtmp, "empty"), 0)
self.assertEqual(len(tree), 0)
self.assertEqual(tree.getmetadata(), b"")
self.assertEqual(tree.walk(0, 0), [])
self.assertTrue(tree.hasdir("/"))
for path in ["", "a", "/", "b/c", "d/"]:
self.assertFalse(path in tree)
if path and path != "/":
self.assertFalse(tree.hasdir(path))
if path != "/":
if path.endswith("/"):
self.assertIsNone(tree.getdir(path))
else:
self.assertIsNone(tree.get(path, None))
def testinsert(self):
tree = treestate.treestate(os.path.join(testtmp, "insert"), 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
expected[path] = (bits, mode, size, mtime, copied)
self.assertEqual(len(tree), len(expected))
for path in tree.walk(0, 0):
self.assertTrue(tree.hasdir(os.path.dirname(path) + "/"))
self.assertEqual(tree.get(path, None), expected[path])
def testremove(self):
tree = treestate.treestate(os.path.join(testtmp, "remove"), 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
if (mtime & 1) == 0:
tree.remove(path)
if path in expected:
del expected[path]
else:
expected[path] = (bits, mode, size, mtime, copied)
self.assertEqual(len(tree), len(expected))
for path in tree.walk(0, 0):
self.assertTrue(tree.hasdir(os.path.dirname(path) + "/"))
self.assertEqual(tree.get(path, None), expected[path])
def testwalk(self):
treepath = os.path.join(testtmp, "walk")
tree = treestate.treestate(treepath, 0)
count = 5000
files = list(itertools.islice(genfiles(), count))
expected = {}
for path, bits, mode, size, mtime, copied in files:
tree.insert(path, bits, mode, size, mtime, copied)
expected[path] = (bits, mode, size, mtime, copied)
def walk(setbits, unsetbits):
return sorted(
k
for k, v in pycompat.iteritems(expected)
if ((v[0] & unsetbits) == 0 and (v[0] & setbits) == setbits)
)
def check(setbits, unsetbits):
self.assertEqual(
walk(setbits, unsetbits), sorted(tree.walk(setbits, unsetbits))
)
for i in ["in-memory", "flushed"]:
for bit in [treestate.IGNORED, treestate.COPIED]:
check(0, bit)
check(bit, 0)
check(treestate.EXIST_P1, treestate.EXIST_P2)
rootid = tree.flush()
tree = treestate.treestate(treepath, rootid)
def testdirfilter(self):
treepath = os.path.join(testtmp, "walk")
tree = treestate.treestate(treepath, 0)
files = ["a/b", "a/b/c", "b/c", "c/d"]
for path in files:
tree.insert(path, 1, 2, 3, 4, None)
self.assertEqual(tree.walk(1, 0, None), files)
self.assertEqual(
tree.walk(1, 0, lambda dir: dir in {"a/b/", "c/"}), ["a/b", "b/c"]
)
self.assertEqual(tree.walk(1, 0, lambda dir: True), [])
def testflush(self):
treepath = os.path.join(testtmp, "flush")
tree = treestate.treestate(treepath, 0)
tree.insert("a", 1, 2, 3, 4, None)
tree.setmetadata(b"1")
rootid1 = tree.flush()
tree.remove("a")
tree.insert("b", 1, 2, 3, 4, None)
tree.setmetadata(b"2")
rootid2 = tree.flush()
tree = treestate.treestate(treepath, rootid1)
self.assertTrue("a" in tree)
self.assertFalse("b" in tree)
self.assertEqual(tree.getmetadata(), b"1")
tree = treestate.treestate(treepath, rootid2)
self.assertFalse("a" in tree)
self.assertTrue("b" in tree)
self.assertEqual(tree.getmetadata(), b"2")
def testsaveas(self):
treepath = os.path.join(testtmp, "saveas")
tree = treestate.treestate(treepath, 0)
tree.insert("a", 1, 2, 3, 4, None)
tree.setmetadata(b"1")
tree.flush()
tree.insert("b", 1, 2, 3, 4, None)
tree.remove("a")
treepath = "%s-savedas" % treepath
tree.setmetadata(b"2")
rootid = tree.saveas(treepath)
tree = treestate.treestate(treepath, rootid)
self.assertFalse("a" in tree)
self.assertTrue("b" in tree)
self.assertEqual(tree.getmetadata(), b"2")
def testfiltered(self):
treepath = os.path.join(testtmp, "filtered")
tree = treestate.treestate(treepath, 0)
tree.insert("a/B/c", 1, 2, 3, 4, None)
filtered = tree.getfiltered("A/B/C", lambda x: x.upper(), 1)
self.assertEqual(filtered, ["a/B/c"])
filtered = tree.getfiltered("A/B/C", lambda x: x, 2)
self.assertEqual(filtered, [])
def testpathcomplete(self):
treepath = os.path.join(testtmp, "pathcomplete")
tree = treestate.treestate(treepath, 0)
paths = ["a/b/c", "a/b/d", "a/c", "de"]
for path in paths:
tree.insert(path, 1, 2, 3, 4, None)
def complete(prefix, fullpath=False):
completed = []
tree.pathcomplete(prefix, 0, 0, completed.append, fullpath)
return completed
self.assertEqual(complete(""), ["a/", "de"])
self.assertEqual(complete("d"), ["de"])
self.assertEqual(complete("a/"), ["a/b/", "a/c"])
self.assertEqual(complete("a/b/"), ["a/b/c", "a/b/d"])
self.assertEqual(complete("a/b/c"), ["a/b/c"])
self.assertEqual(complete("", True), paths)
def testgetdir(self):
treepath = os.path.join(testtmp, "filtered")
tree = treestate.treestate(treepath, 0)
tree.insert("a/b/c", 3, 0, 0, 0, None)
tree.insert("a/d", 5, 0, 0, 0, None)
self.assertEqual(tree.getdir("/"), (3 | 5, 3 & 5))
self.assertEqual(tree.getdir("a/"), (3 | 5, 3 & 5))
self.assertEqual(tree.getdir("a/b/"), (3, 3))
self.assertIsNone(tree.getdir("a/b/c/"))
tree.insert("a/e/f", 10, 0, 0, 0, None)
self.assertEqual(tree.getdir("a/"), (3 | 5 | 10, 3 & 5 & 10))
tree.remove("a/e/f")
self.assertEqual(tree.getdir("a/"), (3 | 5, 3 & 5))
def testsubdirquery(self):
treepath = os.path.join(testtmp, "subdir")
tree = treestate.treestate(treepath, 0)
paths = ["a/b/c", "a/b/d", "a/c", "de"]
for path in paths:
tree.insert(path, 1, 2, 3, 4, None)
self.assertEqual(tree.tracked(""), paths)
self.assertEqual(tree.tracked("de"), ["de"])
self.assertEqual(tree.tracked("a"), [])
self.assertEqual(tree.tracked("a/"), ["a/b/c", "a/b/d", "a/c"])
self.assertEqual(tree.tracked("a/b/"), ["a/b/c", "a/b/d"])
self.assertEqual(tree.tracked("a/b"), [])
self.assertEqual(tree.tracked("a/c/"), [])
self.assertEqual(tree.tracked("a/c"), ["a/c"])
if __name__ == "__main__":
silenttestrunner.main(__name__)<|fim▁end|> | path = posixpath.join(path, randname())
|
<|file_name|>blueprint.go<|end_file_name|><|fim▁begin|>package server
import (
"context"
"github.com/antihax/goesi"
"github.com/motki/motkid/model"
"github.com/motki/motkid/proto"
"github.com/pkg/errors"
)
func (srv *GRPCServer) getAuthorizedContext(tok *proto.Token, role model.Role) (context.Context, int, error) {
if tok == nil || tok.Identifier == "" {
return nil, 0, errors.New("token cannot be empty")<|fim▁hole|> return nil, 0, err
}
a, err := srv.model.GetAuthorization(user, role)
if err != nil {
return nil, 0, err
}
source, err := srv.eveapi.TokenSource((*goesi.CRESTToken)(a.Token))
if err != nil {
return nil, 0, err
}
info, err := srv.eveapi.Verify(source)
if err != nil {
return nil, 0, err
}
t, err := source.Token()
if err != nil {
return nil, 0, err
}
if err = srv.model.SaveAuthorization(user, role, int(info.CharacterID), t); err != nil {
return nil, 0, err
}
return context.WithValue(context.Background(), goesi.ContextOAuth2, source), int(info.CharacterID), nil
}
func (srv *GRPCServer) GetCorpBlueprints(ctx context.Context, req *proto.GetCorpBlueprintsRequest) (resp *proto.GetCorpBlueprintsResponse, err error) {
defer func() {
if err != nil {
resp = &proto.GetCorpBlueprintsResponse{
Result: errorResult(err),
}
err = nil
}
}()
if req.Token == nil {
return nil, errors.New("token cannot be empty")
}
ctx, charID, err := srv.getAuthorizedContext(req.Token, model.RoleLogistics)
if err != nil {
return nil, err
}
char, err := srv.model.GetCharacter(charID)
if err != nil {
return nil, err
}
bps, err := srv.model.GetCorporationBlueprints(ctx, char.CorporationID)
if err != nil {
return nil, err
}
var results []*proto.Blueprint
for _, bp := range bps {
results = append(results, proto.BlueprintToProto(bp))
}
return &proto.GetCorpBlueprintsResponse{
Result: successResult,
Blueprint: results,
}, nil
}<|fim▁end|> | }
user, err := srv.model.GetUserBySessionKey(tok.Identifier)
if err != nil { |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/// The intermediate representation.
pub extern crate compiler_ir as ir;
/// The machine module.
pub extern crate compiler_machine as machine;
/// The machine level IR.
pub extern crate compiler_mir as mir;<|fim▁hole|>pub extern crate compiler_pass as pass;
/// The register allocator.
pub extern crate compiler_regalloc as regalloc;
/// The instruction selector.
pub extern crate compiler_select as select;
/// The backend.
pub extern crate compiler_target as target;
/// The integrated tester.
pub extern crate compiler_test as test;
/// Various utilities.
pub extern crate compiler_util as util;<|fim▁end|> | /// The pass infrastructure. |
<|file_name|>Repo.test.ts<|end_file_name|><|fim▁begin|>import { detectState, RepoState } from '../Repo'
import logger from '../../Logger'
import { spawnSync, SpawnSyncOptionsWithStringEncoding, SpawnSyncReturns } from 'child_process'
jest.mock('child_process')
jest.mock('../../Logger')
afterEach(() => jest.resetAllMocks())
type spawnSyncFn = (command: string, args?: readonly string[], options?: SpawnSyncOptionsWithStringEncoding) => SpawnSyncReturns<string>
test('detectState(): no repo', async () => {
const spawnSyncMock = (spawnSync as unknown as jest.MockedFunction<spawnSyncFn>)
spawnSyncMock.mockReturnValue({
status: 128,
signal: null,
output: [
'',
'',
'fatal: not a git repository (or any of the parent directories): .git\n'
],
pid: 185,
stdout: '',
stderr: 'fatal: not a git repository (or any of the parent directories): .git\n'
})
expect(detectState('/example/dir', logger)).toBe(RepoState.NONE)
expect(spawnSyncMock).toHaveBeenCalledWith('git', ['status', '--porcelain'], { cwd: '/example/dir', encoding: 'utf8' })
})
test('detectState(): dirty repo', async () => {
const spawnSyncMock = (spawnSync as unknown as jest.MockedFunction<spawnSyncFn>)
spawnSyncMock.mockReturnValue({
status: 0,
signal: null,
output: [
'',
'?? index.js\n?? package.json\n',
''
],
pid: 304,
stdout: '?? index.js\n?? package.json\n',
stderr: ''
})
expect(detectState('/example/dir', logger)).toBe(RepoState.GIT_DIRTY)
expect(spawnSyncMock).toHaveBeenCalledWith('git', ['status', '--porcelain'], { cwd: '/example/dir', encoding: 'utf8' })
})
test('detectState(): clean repo', async () => {
const spawnSyncMock = (spawnSync as unknown as jest.MockedFunction<spawnSyncFn>)
spawnSyncMock.mockReturnValue({
status: 0,
signal: null,
output: [
'',
'',
''
],
pid: 198,
stdout: '',
stderr: ''
})
expect(detectState('/example/dir', logger)).toBe(RepoState.GIT_CLEAN)
expect(spawnSyncMock).toHaveBeenCalledWith('git', ['status', '--porcelain'], { cwd: '/example/dir', encoding: 'utf8' })
})
<|fim▁hole|> status: 0,
signal: null,
output: [
'',
'',
''
],
pid: 198,
stdout: '',
stderr: '',
error
})
expect(detectState('/example/dir', logger)).toBe(RepoState.UNKNOWN)
expect(spawnSyncMock).toHaveBeenCalledWith('git', ['status', '--porcelain'], { cwd: '/example/dir', encoding: 'utf8' })
expect(logger.warn).toHaveBeenCalledWith(error)
})
test('detectState(): ENOENT error should not log a warning', async () => {
const spawnSyncMock = (spawnSync as unknown as jest.MockedFunction<spawnSyncFn>)
const error = new Error('fail')
error.code = 'ENOENT'
spawnSyncMock.mockReturnValue({
status: 0,
signal: null,
output: [
'',
'',
''
],
pid: 198,
stdout: '',
stderr: '',
error
})
expect(detectState('/example/dir', logger)).toBe(RepoState.UNKNOWN)
expect(spawnSyncMock).toHaveBeenCalledWith('git', ['status', '--porcelain'], { cwd: '/example/dir', encoding: 'utf8' })
expect(logger.warn).not.toHaveBeenCalled()
})<|fim▁end|> | test('detectState(): unknown error', async () => {
const spawnSyncMock = (spawnSync as unknown as jest.MockedFunction<spawnSyncFn>)
const error = new Error('fail')
spawnSyncMock.mockReturnValue({ |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod expand_repeats;
mod expand_tokens;
mod extract_default_aliases;
mod extract_tokens;
mod flatten_grammar;
mod intern_symbols;
mod process_inlines;
pub(crate) use self::expand_tokens::expand_tokens;
use self::expand_repeats::expand_repeats;
use self::extract_default_aliases::extract_default_aliases;
use self::extract_tokens::extract_tokens;
use self::flatten_grammar::flatten_grammar;
use self::intern_symbols::intern_symbols;
use self::process_inlines::process_inlines;
use super::grammars::{
ExternalToken, InlinedProductionMap, InputGrammar, LexicalGrammar, PrecedenceEntry,
SyntaxGrammar, Variable,
};
use super::rules::{AliasMap, Precedence, Rule, Symbol};
use anyhow::{anyhow, Result};
use std::{
cmp::Ordering,
collections::{hash_map, HashMap, HashSet},
mem,
};
pub(crate) struct IntermediateGrammar<T, U> {
variables: Vec<Variable>,
extra_symbols: Vec<T>,
expected_conflicts: Vec<Vec<Symbol>>,
precedence_orderings: Vec<Vec<PrecedenceEntry>>,
external_tokens: Vec<U>,
variables_to_inline: Vec<Symbol>,
supertype_symbols: Vec<Symbol>,
word_token: Option<Symbol>,
}
pub(crate) type InternedGrammar = IntermediateGrammar<Rule, Variable>;
pub(crate) type ExtractedSyntaxGrammar = IntermediateGrammar<Symbol, ExternalToken>;
#[derive(Debug, PartialEq, Eq)]
pub(crate) struct ExtractedLexicalGrammar {
pub variables: Vec<Variable>,
pub separators: Vec<Rule>,
}
impl<T, U> Default for IntermediateGrammar<T, U> {
fn default() -> Self {
Self {
variables: Default::default(),
extra_symbols: Default::default(),
expected_conflicts: Default::default(),
precedence_orderings: Default::default(),
external_tokens: Default::default(),
variables_to_inline: Default::default(),
supertype_symbols: Default::default(),
word_token: Default::default(),
}
}
}
/// Transform an input grammar into separate components that are ready
/// for parse table construction.
pub(crate) fn prepare_grammar(
input_grammar: &InputGrammar,
) -> Result<(
SyntaxGrammar,
LexicalGrammar,
InlinedProductionMap,
AliasMap,
)> {
validate_precedences(input_grammar)?;
let interned_grammar = intern_symbols(input_grammar)?;
let (syntax_grammar, lexical_grammar) = extract_tokens(interned_grammar)?;
let syntax_grammar = expand_repeats(syntax_grammar);
let mut syntax_grammar = flatten_grammar(syntax_grammar)?;
let lexical_grammar = expand_tokens(lexical_grammar)?;
let default_aliases = extract_default_aliases(&mut syntax_grammar, &lexical_grammar);
let inlines = process_inlines(&syntax_grammar, &lexical_grammar)?;
Ok((syntax_grammar, lexical_grammar, inlines, default_aliases))
}
/// Check that all of the named precedences used in the grammar are declared
/// within the `precedences` lists, and also that there are no conflicting
/// precedence orderings declared in those lists.
fn validate_precedences(grammar: &InputGrammar) -> Result<()> {
// For any two precedence names `a` and `b`, if `a` comes before `b`
// in some list, then it cannot come *after* `b` in any list.
let mut pairs = HashMap::new();
for list in &grammar.precedence_orderings {
for (i, mut entry1) in list.iter().enumerate() {
for mut entry2 in list.iter().skip(i + 1) {
if entry2 == entry1 {
continue;
}
let mut ordering = Ordering::Greater;
if entry1 > entry2 {
ordering = Ordering::Less;
mem::swap(&mut entry1, &mut entry2);
}
match pairs.entry((entry1, entry2)) {
hash_map::Entry::Vacant(e) => {
e.insert(ordering);
}
hash_map::Entry::Occupied(e) => {
if e.get() != &ordering {
return Err(anyhow!(
"Conflicting orderings for precedences {} and {}",
entry1,
entry2
));
}
}
}
}
}
}
// Check that no rule contains a named precedence that is not present in
// any of the `precedences` lists.
fn validate(rule_name: &str, rule: &Rule, names: &HashSet<&String>) -> Result<()> {
match rule {
Rule::Repeat(rule) => validate(rule_name, rule, names),
Rule::Seq(elements) | Rule::Choice(elements) => elements
.iter()
.map(|e| validate(rule_name, e, names))
.collect(),
Rule::Metadata { rule, params } => {
if let Precedence::Name(n) = ¶ms.precedence {
if !names.contains(n) {
return Err(anyhow!(
"Undeclared precedence '{}' in rule '{}'",
n,
rule_name
));
}
}
validate(rule_name, rule, names)?;
Ok(())
}
_ => Ok(()),
}
}
let precedence_names = grammar
.precedence_orderings
.iter()
.flat_map(|l| l.iter())
.filter_map(|p| {
if let PrecedenceEntry::Name(n) = p {
Some(n)
} else {
None
}
})
.collect::<HashSet<&String>>();
for variable in &grammar.variables {
validate(&variable.name, &variable.rule, &precedence_names)?;
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
use crate::generate::grammars::{InputGrammar, Variable, VariableType};
#[test]
fn test_validate_precedences_with_undeclared_precedence() {
let grammar = InputGrammar {
precedence_orderings: vec![
vec![
PrecedenceEntry::Name("a".to_string()),
PrecedenceEntry::Name("b".to_string()),
],
vec![
PrecedenceEntry::Name("b".to_string()),
PrecedenceEntry::Name("c".to_string()),
PrecedenceEntry::Name("d".to_string()),
],
],
variables: vec![
Variable {
name: "v1".to_string(),
kind: VariableType::Named,
rule: Rule::Seq(vec![
Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
]),
},
Variable {
name: "v2".to_string(),
kind: VariableType::Named,
rule: Rule::repeat(Rule::Choice(vec![
Rule::prec_left(Precedence::Name("omg".to_string()), Rule::string("y")),<|fim▁hole|> ..Default::default()
};
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().to_string(),
"Undeclared precedence 'omg' in rule 'v2'",
);
}
#[test]
fn test_validate_precedences_with_conflicting_order() {
let grammar = InputGrammar {
precedence_orderings: vec![
vec![
PrecedenceEntry::Name("a".to_string()),
PrecedenceEntry::Name("b".to_string()),
],
vec![
PrecedenceEntry::Name("b".to_string()),
PrecedenceEntry::Name("c".to_string()),
PrecedenceEntry::Name("a".to_string()),
],
],
variables: vec![
Variable {
name: "v1".to_string(),
kind: VariableType::Named,
rule: Rule::Seq(vec![
Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
]),
},
Variable {
name: "v2".to_string(),
kind: VariableType::Named,
rule: Rule::repeat(Rule::Choice(vec![
Rule::prec_left(Precedence::Name("a".to_string()), Rule::string("y")),
Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
])),
},
],
..Default::default()
};
let result = validate_precedences(&grammar);
assert_eq!(
result.unwrap_err().to_string(),
"Conflicting orderings for precedences 'a' and 'b'",
);
}
}<|fim▁end|> | Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
])),
},
], |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, url
<|fim▁hole|> '',
url(r'^$', 'whatify.views.index'),
url(r'^search/(.+)$', 'whatify.views.search'),
url(r'^torrent_groups/(\d+)$', 'whatify.views.get_torrent_group'),
url(r'^torrent_groups/(\d+)/download$', 'whatify.views.download_torrent_group'),
url(r'^torrent_groups/random$', 'whatify.views.random_torrent_groups'),
url(r'^torrent_groups/top10$', 'whatify.views.top10_torrent_groups'),
url(r'^artists/(\d+)$', 'whatify.views.get_artist'),
)<|fim▁end|> | urlpatterns = patterns( |
<|file_name|>oauth2_session.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
from oauthlib.common import generate_token, urldecode
from oauthlib.oauth2 import WebApplicationClient, InsecureTransportError
from oauthlib.oauth2 import TokenExpiredError, is_secure_transport
import requests
log = logging.getLogger(__name__)
log.setLevel(logging.WARNING)
class TokenUpdated(Warning):
def __init__(self, token):
super(TokenUpdated, self).__init__()
self.token = token
class OAuth2Session(requests.Session):
"""Versatile OAuth 2 extension to :class:`requests.Session`.
Supports any grant type adhering to :class:`oauthlib.oauth2.Client` spec
including the four core OAuth 2 grants.
Can be used to create authorization urls, fetch tokens and access protected
resources using the :class:`requests.Session` interface you are used to.
- :class:`oauthlib.oauth2.WebApplicationClient` (default): Authorization Code Grant
- :class:`oauthlib.oauth2.MobileApplicationClient`: Implicit Grant
- :class:`oauthlib.oauth2.LegacyApplicationClient`: Password Credentials Grant
- :class:`oauthlib.oauth2.BackendApplicationClient`: Client Credentials Grant
Note that the only time you will be using Implicit Grant from python is if
you are driving a user agent able to obtain URL fragments.
"""
def __init__(self, client_id=None, client=None, auto_refresh_url=None,
auto_refresh_kwargs=None, scope=None, redirect_uri=None, token=None,
state=None, token_updater=None, **kwargs):
"""Construct a new OAuth 2 client session.
:param client_id: Client id obtained during registration
:param client: :class:`oauthlib.oauth2.Client` to be used. Default is
WebApplicationClient which is useful for any
hosted application but not mobile or desktop.
:param scope: List of scopes you wish to request access to
:param redirect_uri: Redirect URI you registered as callback
:param token: Token dictionary, must include access_token
and token_type.
:param state: State string used to prevent CSRF. This will be given
when creating the authorization url and must be supplied
when parsing the authorization response.
Can be either a string or a no argument callable.
:auto_refresh_url: Refresh token endpoint URL, must be HTTPS. Supply
this if you wish the client to automatically refresh
your access tokens.
:auto_refresh_kwargs: Extra arguments to pass to the refresh token
endpoint.
:token_updater: Method with one argument, token, to be used to update
your token databse on automatic token refresh. If not
set a TokenUpdated warning will be raised when a token
has been refreshed. This warning will carry the token
in its token argument.
:param kwargs: Arguments to pass to the Session constructor.
"""
super(OAuth2Session, self).__init__(**kwargs)
self.client_id = client_id
if client is not None and not self.client_id:
self.client_id = client.client_id
self.scope = scope
self.redirect_uri = redirect_uri
self.token = token or {}
self.state = state or generate_token
self._state = state
self.auto_refresh_url = auto_refresh_url
self.auto_refresh_kwargs = auto_refresh_kwargs or {}
self.token_updater = token_updater
self._client = client or WebApplicationClient(client_id, token=token)
self._client._populate_attributes(token or {})
# Allow customizations for non compliant providers through various
# hooks to adjust requests and responses.
self.compliance_hook = {
'access_token_response': set([]),
'refresh_token_response': set([]),
'protected_request': set([]),
}
def new_state(self):
"""Generates a state string to be used in authorizations."""
try:
self._state = self.state()
log.debug('Generated new state %s.', self._state)
except TypeError:
self._state = self.state
log.debug('Re-using previously supplied state %s.', self._state)
return self._state
@property
def authorized(self):
"""Boolean that indicates whether this session has an OAuth token
or not. If `self.authorized` is True, you can reasonably expect
OAuth-protected requests to the resource to succeed. If
`self.authorized` is False, you need the user to go through the OAuth
authentication dance before OAuth-protected requests to the resource
will succeed.
"""
return bool(self._client.access_token)
def authorization_url(self, url, state=None, **kwargs):
"""Form an authorization URL.
:param url: Authorization endpoint url, must be HTTPS.
:param state: An optional state string for CSRF protection. If not
given it will be generated for you.
:param kwargs: Extra parameters to include.
:return: authorization_url, state
"""
state = state or self.new_state()
return self._client.prepare_request_uri(url,
redirect_uri=self.redirect_uri,
scope=self.scope,
state=state,
**kwargs), state
def fetch_token(self, token_url, code=None, authorization_response=None,
body='', auth=None, username=None, password=None, method='POST',
timeout=None, headers=None, verify=True, **kwargs):
"""Generic method for fetching an access token from the token endpoint.
If you are using the MobileApplicationClient you will want to use
token_from_fragment instead of fetch_token.
:param token_url: Token endpoint URL, must use HTTPS.
:param code: Authorization code (used by WebApplicationClients).
:param authorization_response: Authorization response URL, the callback
URL of the request back to you. Used by
WebApplicationClients instead of code.
:param body: Optional application/x-www-form-urlencoded body to add the
include in the token request. Prefer kwargs over body.
:param auth: An auth tuple or method as accepted by requests.
:param username: Username used by LegacyApplicationClients.
:param password: Password used by LegacyApplicationClients.
:param method: The HTTP method used to make the request. Defaults
to POST, but may also be GET. Other methods should
be added as needed.
:param headers: Dict to default request headers with.
:param timeout: Timeout of the request in seconds.
:param verify: Verify SSL certificate.
:param kwargs: Extra parameters to include in the token request.
:return: A token dict
"""
if not is_secure_transport(token_url):
raise InsecureTransportError()
if not code and authorization_response:
self._client.parse_request_uri_response(authorization_response,
state=self._state)
code = self._client.code
elif not code and isinstance(self._client, WebApplicationClient):
code = self._client.code
if not code:
raise ValueError('Please supply either code or '
'authorization_code parameters.')
body = self._client.prepare_request_body(code=code, body=body,
redirect_uri=self.redirect_uri, username=username,
password=password, **kwargs)
headers = headers or {
'Accept': 'application/json',
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
}
if method.upper() == 'POST':
r = self.post(token_url, data=dict(urldecode(body)),
timeout=timeout, headers=headers, auth=auth,
verify=verify)
log.debug('Prepared fetch token request body %s', body)
elif method.upper() == 'GET':
# if method is not 'POST', switch body to querystring and GET
r = self.get(token_url, params=dict(urldecode(body)),
timeout=timeout, headers=headers, auth=auth,
verify=verify)
log.debug('Prepared fetch token request querystring %s', body)
else:
raise ValueError('The method kwarg must be POST or GET.')
log.debug('Request to fetch token completed with status %s.',
r.status_code)
log.debug('Request headers were %s', r.request.headers)
log.debug('Request body was %s', r.request.body)
log.debug('Response headers were %s and content %s.',
r.headers, r.text)
log.debug('Invoking %d token response hooks.',
len(self.compliance_hook['access_token_response']))
for hook in self.compliance_hook['access_token_response']:
log.debug('Invoking hook %s.', hook)
r = hook(r)
r.raise_for_status()
self._client.parse_request_body_response(r.text, scope=self.scope)
self.token = self._client.token
log.debug('Obtained token %s.', self.token)
return self.token
def token_from_fragment(self, authorization_response):
"""Parse token from the URI fragment, used by MobileApplicationClients.
:param authorization_response: The full URL of the redirect back to you
:return: A token dict
"""
self._client.parse_request_uri_response(authorization_response,
state=self._state)
self.token = self._client.token
return self.token
def refresh_token(self, token_url, refresh_token=None, body='', auth=None,
timeout=None, verify=True, **kwargs):
"""Fetch a new access token using a refresh token.
:param token_url: The token endpoint, must be HTTPS.
:param refresh_token: The refresh_token to use.
:param body: Optional application/x-www-form-urlencoded body to add the
include in the token request. Prefer kwargs over body.
:param auth: An auth tuple or method as accepted by requests.
:param timeout: Timeout of the request in seconds.
:param verify: Verify SSL certificate.
:param kwargs: Extra parameters to include in the token request.
:return: A token dict
"""
if not token_url:
raise ValueError('No token endpoint set for auto_refresh.')
if not is_secure_transport(token_url):
raise InsecureTransportError()
# Need to nullify token to prevent it from being added to the request
refresh_token = refresh_token or self.token.get('refresh_token')
self.token = {}
log.debug("Setting redirect uri to " + self.refresher.redirect_uri)
kwargs['redirect_uri'] = self.refresher.redirect_uri
log.debug('Adding auto refresh key word arguments %s.',
self.auto_refresh_kwargs)
kwargs.update(self.auto_refresh_kwargs)
log.debug('Prepared refresh token request body pre- %s', body)
body = self._client.prepare_refresh_body(body=body,
refresh_token=refresh_token, scope=self.scope, **kwargs)
log.debug('Prepared refresh token request body %s', body)
log.debug(auth)
r = self.post(token_url, data=dict(urldecode(body)), auth=auth,
timeout=timeout, verify=verify)
log.debug('Request to refresh token completed with status %s.',
r.status_code)
log.debug('Response headers were %s and content %s.',
r.headers, r.text)
log.debug('Invoking %d token response hooks.',
len(self.compliance_hook['refresh_token_response']))
for hook in self.compliance_hook['refresh_token_response']:
log.debug('Invoking hook %s.', hook)
r = hook(r)
self.token = self._client.parse_request_body_response(r.text, scope=self.scope)
if 'refresh_token' not in self.token:
log.debug('No new refresh token given. Re-using old.')
self.token['refresh_token'] = refresh_token
return self.token
def request(self, method, url, data=None, headers=None, **kwargs):
"""Intercept all requests and add the OAuth 2 token if present."""
if not is_secure_transport(url):
raise InsecureTransportError()
if self.token:
log.debug('Invoking %d protected resource request hooks.',
len(self.compliance_hook['protected_request']))
for hook in self.compliance_hook['protected_request']:
url, headers, data = hook(url, headers, data)
try:
url, headers, data = self._client.add_token(url,
http_method=method, body=data, headers=headers)
# Attempt to retrieve and save new access token if expired
except TokenExpiredError:
if self.auto_refresh_url:
token = self.refresh_token(self.auto_refresh_url,auth=self.refresher.auth,**kwargs)
if self.token_updater:
self.token_updater(token)
url, headers, data = self._client.add_token(url,
http_method=method, body=data, headers=headers)
else:
raise TokenUpdated(token)
else:
raise
return super(OAuth2Session, self).request(method, url,
headers=headers, data=data, **kwargs)
def register_compliance_hook(self, hook_type, hook):
"""Register a hook for request/response tweaking.
Available hooks are:
access_token_response invoked before token parsing.
refresh_token_response invoked before refresh token parsing.
protected_request invoked before making a request.
If you find a new hook is needed please send a GitHub PR request
or open an issue.
"""
if hook_type not in self.compliance_hook:
raise ValueError('Hook type %s is not in %s.',
hook_type, self.compliance_hook)
self.compliance_hook[hook_type].add(hook)<|fim▁end|> | from __future__ import unicode_literals
import logging |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3
# -*- Coding : UTF-8 -*-
from os import path
import github_update_checker
from setuptools import setup, find_packages
file_path = path.abspath(path.dirname(__file__))
with open(path.join(file_path, "README.md"), encoding="UTF-8") as f:
long_description = f.read()
<|fim▁hole|> description="A simple update checker for github in python",
long_description=long_description,
url="https://github.com/Tellendil/py_github_update_checker",
author="Benjamin Schubert",
author_email="[email protected]",
license="MIT",
classifiers=[
'Development Status :: 5 - Stable',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 3'
],
keywords="update",
packages=find_packages()
)<|fim▁end|> |
setup(
name="github_update_checker",
version=github_update_checker.__version__, |
<|file_name|>PendingPurchasesInstallationDialog.java<|end_file_name|><|fim▁begin|>/*
* RapidMiner
*
* Copyright (C) 2001-2014 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see http://www.gnu.org/licenses/.
*/
package com.rapid_i.deployment.update.client;
import java.awt.BorderLayout;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.KeyEvent;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.Level;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.KeyStroke;
import javax.swing.SwingUtilities;
import javax.swing.border.EmptyBorder;
import javax.swing.border.EtchedBorder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import com.rapid_i.deployment.update.client.listmodels.AbstractPackageListModel;
import com.rapidminer.deployment.client.wsimport.PackageDescriptor;
import com.rapidminer.deployment.client.wsimport.UpdateService;
import com.rapidminer.gui.RapidMinerGUI;
import com.rapidminer.gui.tools.ExtendedJScrollPane;
import com.rapidminer.gui.tools.ProgressThread;
import com.rapidminer.gui.tools.ResourceAction;
import com.rapidminer.gui.tools.SwingTools;
import com.rapidminer.gui.tools.dialogs.ButtonDialog;
import com.rapidminer.gui.tools.dialogs.ConfirmDialog;
import com.rapidminer.io.process.XMLTools;
import com.rapidminer.tools.FileSystemService;
import com.rapidminer.tools.I18N;
import com.rapidminer.tools.LogService;
import com.rapidminer.tools.ParameterService;
import com.rapidminer.tools.XMLException;
/**
* The Dialog is eventually shown at the start of RapidMiner, if the user purchased extensions online but haven't installed them yet.
*
* @author Dominik Halfkann
*/
public class PendingPurchasesInstallationDialog extends ButtonDialog {
private static final long serialVersionUID = 1L;
private final PackageDescriptorCache packageDescriptorCache = new PackageDescriptorCache();
private AbstractPackageListModel purchasedModel = new PurchasedNotInstalledModel(packageDescriptorCache);
JCheckBox neverAskAgain = new JCheckBox(I18N.getMessage(I18N.getGUIBundle(), "gui.dialog.purchased_not_installed.not_check_on_startup"));
private final List<String> packages;
private boolean isConfirmed;
private LinkedList<PackageDescriptor> installablePackageList;
private JButton remindNeverButton;
private JButton remindLaterButton;
private JButton okButton;
private class PurchasedNotInstalledModel extends AbstractPackageListModel {
private static final long serialVersionUID = 1L;
public PurchasedNotInstalledModel(PackageDescriptorCache cache) {
super(cache, "gui.dialog.update.tab.no_packages");
}
@Override
public List<String> handleFetchPackageNames() {
return packages;
}
}
public PendingPurchasesInstallationDialog(List<String> packages) {
super("purchased_not_installed");
this.packages = packages;
remindNeverButton = remindNeverButton();
remindLaterButton = remindLaterButton();
okButton = makeOkButton("install_purchased");
layoutDefault(makeContentPanel(), NORMAL, okButton, remindNeverButton, remindLaterButton);
this.setPreferredSize(new Dimension(404, 430));
this.setMaximumSize(new Dimension(404, 430));
this.setMinimumSize(new Dimension(404, 300));
this.setSize(new Dimension(404, 430));
}
private JPanel makeContentPanel() {
BorderLayout layout = new BorderLayout(12, 12);
JPanel panel = new JPanel(layout);
panel.setBorder(new EmptyBorder(0, 12, 8, 12));
panel.add(createExtensionListScrollPane(purchasedModel), BorderLayout.CENTER);
purchasedModel.update();
JPanel southPanel = new JPanel(new BorderLayout(0, 7));
JLabel question = new JLabel(I18N.getMessage(I18N.getGUIBundle(), "gui.dialog.purchased_not_installed.should_install"));
southPanel.add(question, BorderLayout.CENTER);
southPanel.add(neverAskAgain, BorderLayout.SOUTH);
panel.add(southPanel, BorderLayout.SOUTH);
return panel;
}
private JScrollPane createExtensionListScrollPane(AbstractPackageListModel model) {
final JList updateList = new JList(model);
updateList.setCellRenderer(new UpdateListCellRenderer(true));
JScrollPane extensionListScrollPane = new ExtendedJScrollPane(updateList);
extensionListScrollPane.setBorder(BorderFactory.createEtchedBorder(EtchedBorder.LOWERED));
return extensionListScrollPane;
}
private JButton remindLaterButton() {
Action Action = new ResourceAction("ask_later") {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
wasConfirmed = false;
checkNeverAskAgain();
close();
}
};
getRootPane().getInputMap(JComponent.WHEN_IN_FOCUSED_WINDOW).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0, false), "CLOSE");
getRootPane().getActionMap().put("CLOSE", Action);
JButton button = new JButton(Action);
getRootPane().setDefaultButton(button);
return button;
}
private JButton remindNeverButton() {
Action Action = new ResourceAction("ask_never") {
private static final long serialVersionUID = 1L;
@Override
public void actionPerformed(ActionEvent e) {
wasConfirmed = false;
checkNeverAskAgain();
neverRemindAgain();
close();
}
};
JButton button = new JButton(Action);
getRootPane().setDefaultButton(button);
return button;
}
@Override
protected void ok() {
checkNeverAskAgain();
startUpdate(getPackageDescriptorList());
dispose();
}
/**
 * Collects every {@link PackageDescriptor} currently held by the
 * purchased-packages list model (non-descriptor entries such as status
 * rows are skipped).
 */
public List<PackageDescriptor> getPackageDescriptorList() {
	List<PackageDescriptor> descriptors = new ArrayList<PackageDescriptor>();
	int size = purchasedModel.getSize();
	for (int i = 0; i < size; i++) {
		Object element = purchasedModel.getElementAt(i);
		if (element instanceof PackageDescriptor) {
			descriptors.add((PackageDescriptor) element);
		}
	}
	return descriptors;
}
public void startUpdate(final List<PackageDescriptor> downloadList) {
final UpdateService service;
try {
service = UpdateManager.getService();
} catch (Exception e) {
SwingTools.showSimpleErrorMessage("failed_update_server", e, UpdateManager.getBaseUrl());
return;
}
new ProgressThread("resolving_dependencies", true) {
@Override
public void run() {
try {
getProgressListener().setTotal(100);
remindLaterButton.setEnabled(false);
remindNeverButton.setEnabled(false);
final HashMap<PackageDescriptor, HashSet<PackageDescriptor>> dependency = UpdateDialog.resolveDependency(downloadList, packageDescriptorCache);
getProgressListener().setCompleted(30);
installablePackageList = UpdateDialog.getPackagesforInstallation(dependency);
final HashMap<String, String> licenseNameToLicenseTextMap = UpdateDialog.collectLicenses(installablePackageList,getProgressListener(),100,30,100);
SwingUtilities.invokeLater(new Runnable() {
@Override
public void run() {
isConfirmed = ConfirmLicensesDialog.confirm(dependency, licenseNameToLicenseTextMap);
new ProgressThread("installing_updates", true) {
@Override
public void run() {
try {<|fim▁hole|> if (isConfirmed) {
getProgressListener().setTotal(100);
getProgressListener().setCompleted(20);
UpdateService service = UpdateManager.getService();
UpdateManager um = new UpdateManager(service);
List<PackageDescriptor> installedPackages = um.performUpdates(installablePackageList, getProgressListener());
getProgressListener().setCompleted(40);
if (installedPackages.size() > 0) {
int confirmation = SwingTools.showConfirmDialog((installedPackages.size() == 1 ? "update.complete_restart" : "update.complete_restart1"),
ConfirmDialog.YES_NO_OPTION, installedPackages.size());
if (confirmation == ConfirmDialog.YES_OPTION) {
RapidMinerGUI.getMainFrame().exit(true);
} else if (confirmation == ConfirmDialog.NO_OPTION) {
if (installedPackages.size() == installablePackageList.size()) {
dispose();
}
}
}
getProgressListener().complete();
}
} catch (Exception e) {
SwingTools.showSimpleErrorMessage("error_installing_update", e, e.getMessage());
} finally {
getProgressListener().complete();
}
}
}.start();
}
});
remindLaterButton.setEnabled(true);
remindNeverButton.setEnabled(true);
getProgressListener().complete();
} catch (Exception e) {
SwingTools.showSimpleErrorMessage("error_resolving_dependencies", e, e.getMessage());
}
}
}.start();
}
private void checkNeverAskAgain() {
if (neverAskAgain.isSelected()) {
ParameterService.setParameterValue(RapidMinerGUI.PROPERTY_RAPIDMINER_GUI_PURCHASED_NOT_INSTALLED_CHECK, "false");
ParameterService.saveParameters();
}
}
private void neverRemindAgain() {
LogService.getRoot().log(Level.CONFIG, "com.rapid_i.deployment.update.client.PurchasedNotInstalledDialog.saving_ignored_extensions_file");
Document doc;
try {
doc = DocumentBuilderFactory.newInstance().newDocumentBuilder().newDocument();
} catch (ParserConfigurationException e) {
LogService.getRoot().log(Level.WARNING,
I18N.getMessage(LogService.getRoot().getResourceBundle(),
"com.rapid_i.deployment.update.client.PurchasedNotInstalledDialog.creating_xml_document_error",
e),
e);
return;
}
Element root = doc.createElement(UpdateManager.NEVER_REMIND_INSTALL_EXTENSIONS_FILE_NAME);
doc.appendChild(root);
for (String i : purchasedModel.fetchPackageNames()) {
Element entryElem = doc.createElement("extension_name");
entryElem.setTextContent(i);
root.appendChild(entryElem);
}
File file = FileSystemService.getUserConfigFile(UpdateManager.NEVER_REMIND_INSTALL_EXTENSIONS_FILE_NAME);
try {
XMLTools.stream(doc, file, null);
} catch (XMLException e) {
LogService.getRoot().log(Level.WARNING,
I18N.getMessage(LogService.getRoot().getResourceBundle(),
"com.rapid_i.deployment.update.client.PurchasedNotInstalledDialog.saving_ignored_extensions_file_error",
e),
e);
}
}
}<|fim▁end|> | |
<|file_name|>request.all.rooms.event.ts<|end_file_name|><|fim▁begin|>import { Event } from "./event";
export class RequestAllRoomsEvent {<|fim▁hole|><|fim▁end|> | static readonly eventName: string = Event.RequestAllRooms;
} |
<|file_name|>course_quality.py<|end_file_name|><|fim▁begin|># pylint: disable=missing-docstring
import logging
import numpy as np
from scipy import stats
from rest_framework.generics import GenericAPIView
from rest_framework.response import Response
from edxval.api import get_videos_for_course
from openedx.core.djangoapps.request_cache.middleware import request_cached
from openedx.core.lib.api.view_utils import DeveloperErrorViewMixin, view_auth_classes
from openedx.core.lib.graph_traversals import traverse_pre_order
from xmodule.modulestore.django import modulestore
from .utils import get_bool_param, course_author_access_required
log = logging.getLogger(__name__)
@view_auth_classes()
class CourseQualityView(DeveloperErrorViewMixin, GenericAPIView):
    """
    **Use Case**

    **Example Requests**

        GET /api/courses/v1/quality/{course_id}/

    **GET Parameters**

        A GET request may include the following parameters.

        * all
        * sections
        * subsections
        * units
        * videos
        * exclude_graded (boolean) - whether to exclude graded subsections in the subsections and units information.

    **GET Response Values**

        The HTTP 200 response has the following values.

        * is_self_paced - whether the course is self-paced.
        * sections
            * total_number - number of sections in the course.
            * total_visible - number of sections visible to learners in the course.
            * number_with_highlights - number of sections that have at least one highlight entered.
            * highlights_enabled - whether highlights are enabled in the course.
        * subsections
            * total_visible - number of subsections visible to learners in the course.
            * num_with_one_block_type - number of visible subsections containing only one type of block.
            * num_block_types - statistics for number of block types across all visible subsections.
                * min
                * max
                * mean
                * median
                * mode
        * units
            * total_visible - number of units visible to learners in the course.
            * num_blocks - statistics for number of block across all visible units.
                * min
                * max
                * mean
                * median
                * mode
        * videos
            * total_number - number of video blocks in the course.
            * num_with_val_id - number of video blocks that include video pipeline IDs.
            * num_mobile_encoded - number of videos encoded through the video pipeline.
            * durations - statistics for video duration across all videos encoded through the video pipeline.
                * min
                * max
                * mean
                * median
                * mode
    """

    @course_author_access_required
    def get(self, request, course_key):
        """
        Returns validation information for the given course.
        """
        all_requested = get_bool_param(request, 'all', False)

        store = modulestore()
        with store.bulk_operations(course_key):
            # Depth is chosen lazily: only fetch the whole block graph when a
            # requested metric actually needs it.
            course = store.get_course(course_key, depth=self._required_course_depth(request, all_requested))

            response = dict(
                is_self_paced=course.self_paced,
            )
            if get_bool_param(request, 'sections', all_requested):
                response.update(
                    sections=self._sections_quality(course)
                )
            if get_bool_param(request, 'subsections', all_requested):
                response.update(
                    subsections=self._subsections_quality(course, request)
                )
            if get_bool_param(request, 'units', all_requested):
                response.update(
                    units=self._units_quality(course, request)
                )
            if get_bool_param(request, 'videos', all_requested):
                response.update(
                    videos=self._videos_quality(course)
                )
            return Response(response)

    def _required_course_depth(self, request, all_requested):
        """Returns the minimum modulestore depth needed for the requested metrics."""
        if get_bool_param(request, 'units', all_requested):
            # The num_blocks metric for "units" requires retrieving all blocks in the graph.
            return None
        elif get_bool_param(request, 'subsections', all_requested):
            # The num_block_types metric for "subsections" requires retrieving all blocks in the graph.
            return None
        elif get_bool_param(request, 'sections', all_requested):
            return 1
        else:
            return 0

    def _sections_quality(self, course):
        """Counts of sections, visible sections, and sections with highlights."""
        sections, visible_sections = self._get_sections(course)
        sections_with_highlights = [s for s in visible_sections if s.highlights]
        return dict(
            total_number=len(sections),
            total_visible=len(visible_sections),
            number_with_highlights=len(sections_with_highlights),
            highlights_enabled=course.highlights_enabled_for_messaging,
        )

    def _subsections_quality(self, course, request):
        """Block-type-diversity statistics across visible subsections."""
        subsection_unit_dict = self._get_subsections_and_units(course, request)
        num_block_types_per_subsection_dict = {}
        for subsection_key, unit_dict in subsection_unit_dict.iteritems():
            leaf_block_types_in_subsection = (
                unit_info['leaf_block_types']
                for unit_info in unit_dict.itervalues()
            )
            num_block_types_per_subsection_dict[subsection_key] = len(set().union(*leaf_block_types_in_subsection))
        return dict(
            total_visible=len(num_block_types_per_subsection_dict),
            num_with_one_block_type=list(num_block_types_per_subsection_dict.itervalues()).count(1),
            num_block_types=self._stats_dict(list(num_block_types_per_subsection_dict.itervalues())),
        )

    def _units_quality(self, course, request):
        """Leaf-block-count statistics across visible units."""
        subsection_unit_dict = self._get_subsections_and_units(course, request)
        num_leaf_blocks_per_unit = [
            unit_info['num_leaf_blocks']
            for unit_dict in subsection_unit_dict.itervalues()
            for unit_info in unit_dict.itervalues()
        ]
        return dict(
            total_visible=len(num_leaf_blocks_per_unit),
            num_blocks=self._stats_dict(num_leaf_blocks_per_unit),
        )

    def _videos_quality(self, course):
        """Counts and duration statistics for the course's video blocks."""
        video_blocks_in_course = modulestore().get_items(course.id, qualifiers={'category': 'video'})
        videos_in_val = list(get_videos_for_course(course.id))
        video_durations = [video['duration'] for video in videos_in_val]
        return dict(
            total_number=len(video_blocks_in_course),
            num_mobile_encoded=len(videos_in_val),
            num_with_val_id=len([v for v in video_blocks_in_course if v.edx_video_id]),
            durations=self._stats_dict(video_durations),
        )

    @request_cached
    def _get_subsections_and_units(self, course, request):
        """
        Returns {subsection_key: {unit_key: {num_leaf_blocks: <>, leaf_block_types: set(<>) }}}
        for all visible subsections and units.
        """
        _, visible_sections = self._get_sections(course)
        subsection_dict = {}
        for section in visible_sections:
            visible_subsections = self._get_visible_children(section)
            if get_bool_param(request, 'exclude_graded', False):
                visible_subsections = [s for s in visible_subsections if not s.graded]
            for subsection in visible_subsections:
                unit_dict = {}
                visible_units = self._get_visible_children(subsection)
                for unit in visible_units:
                    leaf_blocks = self._get_leaf_blocks(unit)
                    unit_dict[unit.location] = dict(
                        num_leaf_blocks=len(leaf_blocks),
                        leaf_block_types=set(block.location.block_type for block in leaf_blocks),
                    )
                subsection_dict[subsection.location] = unit_dict
        return subsection_dict

    @request_cached
    def _get_sections(self, course):
        return self._get_all_children(course)

    def _get_all_children(self, parent):
        """Returns (all children, children visible to learners) of parent."""
        store = modulestore()
        children = [store.get_item(child_usage_key) for child_usage_key in self._get_children(parent)]
        visible_children = [
            c for c in children
            if not c.visible_to_staff_only and not c.hide_from_toc
        ]
        return children, visible_children

    def _get_visible_children(self, parent):
        # Fixed misspelled local ("visible_chidren").
        _, visible_children = self._get_all_children(parent)
        return visible_children

    def _get_children(self, parent):
        if not hasattr(parent, 'children'):
            return []
        else:
            return parent.children

    def _get_leaf_blocks(self, unit):
        """Returns all leaf (non-container) blocks reachable below the given unit."""
        def leaf_filter(block):
            return (
                block.location.block_type not in ('chapter', 'sequential', 'vertical') and
                len(self._get_children(block)) == 0
            )

        # FIM residue marker removed from this comprehension.
        return [
            block for block in
            traverse_pre_order(unit, self._get_visible_children, leaf_filter)
        ]

    def _stats_dict(self, data):
        """min/max/mean/median/mode summary of data; all-None for empty input."""
        if not data:
            return dict(
                min=None,
                max=None,
                mean=None,
                median=None,
                mode=None,
            )
        else:
            return dict(
                min=min(data),
                max=max(data),
                mean=np.around(np.mean(data)),
                median=np.around(np.median(data)),
                mode=stats.mode(data, axis=None)[0][0],
            )
<|file_name|>OptionsMenuData.cpp<|end_file_name|><|fim▁begin|>/*
OptionsMenuData.cpp
Author : Cyrielle
File under GNU GPL v3.0 license
*/
#include "OptionsMenuData.hpp"
#include "ResourceLoader.hpp"
namespace OpMon {
    namespace Model {

        // Loads every texture used by the options menu up front, so the views
        // can draw without touching the filesystem afterwards.
        // (Reconstructed: FIM residue markers had displaced the yesTx/creditsBg
        // loads past the namespace close; TODO confirm their intended position.)
        OptionsMenuData::OptionsMenuData(UiData *data)
            : uidata(data) {
            ResourceLoader::load(background, "backgrounds/options.png");
            ResourceLoader::load(selectBar, "sprites/misc/selectBar.png");
            ResourceLoader::load(langBg, "backgrounds/lang.png");
            ResourceLoader::load(yesTx, "sprites/misc/yes.png");
            ResourceLoader::load(creditsBg, "backgrounds/credits.png");
            ResourceLoader::load(controlsBg, "backgrounds/controls.png");
            ResourceLoader::load(volumeCur, "sprites/misc/cursor.png");
            ResourceLoader::load(keyChange, "sprites/misc/keyChange.png");
        }

    } // namespace Model
} // namespace OpMon
<|file_name|>htmlvideoelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::{LoadBlocker, LoadType};
use crate::dom::attr::Attr;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::HTMLVideoElementBinding::HTMLVideoElementMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::document::Document;
use crate::dom::element::{AttributeMutation, Element};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlmediaelement::{HTMLMediaElement, ReadyState};
use crate::dom::node::{document_from_node, window_from_node, Node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::FetchCanceller;
use crate::image_listener::{generate_cache_listener_for_element, ImageCacheListener};
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use dom_struct::dom_struct;
use euclid::default::Size2D;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use net_traits::image_cache::{
ImageCache, ImageCacheResult, ImageOrMetadataAvailable, ImageResponse, PendingImageId,
UsePlaceholder,
};
use net_traits::request::{CredentialsMode, Destination, RequestBuilder};
use net_traits::{
CoreResourceMsg, FetchChannels, FetchMetadata, FetchResponseListener, FetchResponseMsg,
};
use net_traits::{NetworkError, ResourceFetchTiming, ResourceTimingType};
use servo_media::player::video::VideoFrame;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
const DEFAULT_WIDTH: u32 = 300;
const DEFAULT_HEIGHT: u32 = 150;
/// The `<video>` element: <https://html.spec.whatwg.org/multipage/#htmlvideoelement>.
#[dom_struct]
pub struct HTMLVideoElement {
    /// Shared `<audio>`/`<video>` media-element machinery this element builds on.
    htmlmediaelement: HTMLMediaElement,
    /// https://html.spec.whatwg.org/multipage/#dom-video-videowidth
    video_width: Cell<u32>,
    /// https://html.spec.whatwg.org/multipage/#dom-video-videoheight
    video_height: Cell<u32>,
    /// Incremented whenever tasks associated with this element are cancelled.
    generation_id: Cell<u32>,
    /// Poster frame fetch request canceller.
    poster_frame_canceller: DomRefCell<FetchCanceller>,
    /// Load event blocker. Will block the load event while the poster frame
    /// is being fetched.
    load_blocker: DomRefCell<Option<LoadBlocker>>,
    /// A copy of the last frame
    #[ignore_malloc_size_of = "VideoFrame"]
    last_frame: DomRefCell<Option<VideoFrame>>,
}
impl HTMLVideoElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> HTMLVideoElement {
HTMLVideoElement {
htmlmediaelement: HTMLMediaElement::new_inherited(local_name, prefix, document),
video_width: Cell::new(DEFAULT_WIDTH),
video_height: Cell::new(DEFAULT_HEIGHT),
generation_id: Cell::new(0),
poster_frame_canceller: DomRefCell::new(Default::default()),
load_blocker: Default::default(),
last_frame: Default::default(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
) -> DomRoot<HTMLVideoElement> {
Node::reflect_node(
Box::new(HTMLVideoElement::new_inherited(
local_name, prefix, document,
)),
document,
)
}
pub fn get_video_width(&self) -> u32 {
self.video_width.get()
}
pub fn set_video_width(&self, width: u32) {
self.video_width.set(width);
}
pub fn get_video_height(&self) -> u32 {
self.video_height.get()
}
pub fn set_video_height(&self, height: u32) {
self.video_height.set(height);
}
pub fn allow_load_event(&self) {
LoadBlocker::terminate(&mut *self.load_blocker.borrow_mut());
}
pub fn get_current_frame_data(&self) -> Option<(Option<ipc::IpcSharedMemory>, Size2D<u32>)> {
let frame = self.htmlmediaelement.get_current_frame();
if frame.is_some() {
*self.last_frame.borrow_mut() = frame;
}
match self.last_frame.borrow().as_ref() {
Some(frame) => {<|fim▁hole|> let size = Size2D::new(frame.get_width() as u32, frame.get_height() as u32);
if !frame.is_gl_texture() {
let data = Some(ipc::IpcSharedMemory::from_bytes(&frame.get_data()));
Some((data, size))
} else {
// XXX(victor): here we only have the GL texture ID.
Some((None, size))
}
},
None => None,
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn fetch_poster_frame(&self, poster_url: &str) {
// Step 1.
let cancel_receiver = self.poster_frame_canceller.borrow_mut().initialize();
self.generation_id.set(self.generation_id.get() + 1);
// Step 2.
if poster_url.is_empty() {
return;
}
// Step 3.
let poster_url = match document_from_node(self).url().join(&poster_url) {
Ok(url) => url,
Err(_) => return,
};
// Step 4.
// We use the image cache for poster frames so we save as much
// network activity as possible.
let window = window_from_node(self);
let image_cache = window.image_cache();
let sender = generate_cache_listener_for_element(self);
let cache_result = image_cache.track_image(
poster_url.clone(),
window.origin().immutable().clone(),
None,
sender,
UsePlaceholder::No,
);
match cache_result {
ImageCacheResult::Available(ImageOrMetadataAvailable::ImageAvailable {
image,
url,
..
}) => {
self.process_image_response(ImageResponse::Loaded(image, url));
},
ImageCacheResult::ReadyForRequest(id) => {
self.do_fetch_poster_frame(poster_url, id, cancel_receiver)
},
_ => (),
}
}
/// https://html.spec.whatwg.org/multipage/#poster-frame
fn do_fetch_poster_frame(
&self,
poster_url: ServoUrl,
id: PendingImageId,
cancel_receiver: ipc::IpcReceiver<()>,
) {
// Continuation of step 4.
let document = document_from_node(self);
let request = RequestBuilder::new(poster_url.clone(), document.global().get_referrer())
.destination(Destination::Image)
.credentials_mode(CredentialsMode::Include)
.use_url_credentials(true)
.origin(document.origin().immutable().clone())
.pipeline_id(Some(document.global().pipeline_id()));
// Step 5.
// This delay must be independent from the ones created by HTMLMediaElement during
// its media load algorithm, otherwise a code like
// <video poster="poster.png"></video>
// (which triggers no media load algorithm unless a explicit call to .load() is done)
// will block the document's load event forever.
let mut blocker = self.load_blocker.borrow_mut();
LoadBlocker::terminate(&mut *blocker);
*blocker = Some(LoadBlocker::new(
&document_from_node(self),
LoadType::Image(poster_url.clone()),
));
let window = window_from_node(self);
let context = Arc::new(Mutex::new(PosterFrameFetchContext::new(
self, poster_url, id,
)));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = window
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
let global = self.global();
global
.core_resource_thread()
.send(CoreResourceMsg::Fetch(
request,
FetchChannels::ResponseMsg(action_sender, Some(cancel_receiver)),
))
.unwrap();
}
}
/// WebIDL-exposed `<video>` members.
impl HTMLVideoElementMethods for HTMLVideoElement {
    // https://html.spec.whatwg.org/multipage/#dom-video-videowidth
    // Per spec, report 0 until media metadata has been loaded.
    fn VideoWidth(&self) -> u32 {
        if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
            return 0;
        }
        self.video_width.get()
    }

    // https://html.spec.whatwg.org/multipage/#dom-video-videoheight
    // Per spec, report 0 until media metadata has been loaded.
    fn VideoHeight(&self) -> u32 {
        if self.htmlmediaelement.get_ready_state() == ReadyState::HaveNothing {
            return 0;
        }
        self.video_height.get()
    }

    // https://html.spec.whatwg.org/multipage/#dom-video-poster
    make_getter!(Poster, "poster");

    // https://html.spec.whatwg.org/multipage/#dom-video-poster
    make_setter!(SetPoster, "poster");

    // For testing purposes only. This is not an event from
    // https://html.spec.whatwg.org/multipage/#dom-video-poster
    event_handler!(postershown, GetOnpostershown, SetOnpostershown);
}
impl VirtualMethods for HTMLVideoElement {
    fn super_type(&self) -> Option<&dyn VirtualMethods> {
        Some(self.upcast::<HTMLMediaElement>() as &dyn VirtualMethods)
    }

    /// After delegating to the media element, start a poster-frame fetch
    /// whenever the `poster` attribute gains a (new) value.
    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
        self.super_type().unwrap().attribute_mutated(attr, mutation);

        if let Some(new_value) = mutation.new_value(attr) {
            match attr.local_name() {
                &local_name!("poster") => {
                    self.fetch_poster_frame(&new_value);
                },
                _ => (),
            };
        }
    }
}
/// Lets the image cache deliver poster-frame responses back to this element.
impl ImageCacheListener for HTMLVideoElement {
    fn generation_id(&self) -> u32 {
        self.generation_id.get()
    }

    // Poster responses are handled by the shared media-element machinery.
    fn process_image_response(&self, response: ImageResponse) {
        self.htmlmediaelement.process_poster_response(response);
    }
}
/// Fetch context for a poster-frame request: forwards network responses to
/// the image cache and cancels the fetch on HTTP errors.
struct PosterFrameFetchContext {
    /// Reference to the script thread image cache.
    image_cache: Arc<dyn ImageCache>,
    /// The element that initiated the request.
    elem: Trusted<HTMLVideoElement>,
    /// The cache ID for this request.
    id: PendingImageId,
    /// True if this response is invalid and should be ignored.
    cancelled: bool,
    /// Timing data for this resource
    resource_timing: ResourceFetchTiming,
    /// Url for the resource
    url: ServoUrl,
}
impl FetchResponseListener for PosterFrameFetchContext {
    fn process_request_body(&mut self) {}
    fn process_request_eof(&mut self) {}

    /// Forwards the response head to the image cache; on a non-2xx status,
    /// marks this context cancelled and aborts the underlying network request.
    fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
        self.image_cache
            .notify_pending_response(self.id, FetchResponseMsg::ProcessResponse(metadata.clone()));

        let metadata = metadata.ok().map(|meta| match meta {
            FetchMetadata::Unfiltered(m) => m,
            FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
        });

        // A missing status (e.g. non-HTTP responses) is treated as success here.
        let status_is_ok = metadata
            .as_ref()
            .and_then(|m| m.status.as_ref())
            .map_or(true, |s| s.0 >= 200 && s.0 < 300);

        if !status_is_ok {
            self.cancelled = true;
            self.elem
                .root()
                .poster_frame_canceller
                .borrow_mut()
                .cancel();
        }
    }

    fn process_response_chunk(&mut self, payload: Vec<u8>) {
        if self.cancelled {
            // An error was received previously, skip processing the payload.
            return;
        }

        self.image_cache
            .notify_pending_response(self.id, FetchResponseMsg::ProcessResponseChunk(payload));
    }

    /// Unblocks the document load event regardless of fetch success, then
    /// notifies the image cache that the response is complete.
    fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
        self.elem.root().allow_load_event();
        self.image_cache
            .notify_pending_response(self.id, FetchResponseMsg::ProcessResponseEOF(response));
    }

    fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
        &mut self.resource_timing
    }

    fn resource_timing(&self) -> &ResourceFetchTiming {
        &self.resource_timing
    }

    fn submit_resource_timing(&mut self) {
        network_listener::submit_timing(self)
    }
}
impl ResourceTimingListener for PosterFrameFetchContext {
    /// Identifies this entry in the Resource Timing table by the initiating
    /// element's local name (i.e. "video") and the poster URL.
    fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
        let initiator_type = InitiatorType::LocalName(
            self.elem
                .root()
                .upcast::<Element>()
                .local_name()
                .to_string(),
        );
        (initiator_type, self.url.clone())
    }

    fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
        document_from_node(&*self.elem.root()).global()
    }
}
impl PreInvoke for PosterFrameFetchContext {
    // Always dispatch fetch messages; stale/failed responses are filtered via
    // the `cancelled` flag and the poster-frame canceller instead.
    fn should_invoke(&self) -> bool {
        true
    }
}
impl PosterFrameFetchContext {
    /// Creates a fresh, not-yet-cancelled context for fetching `url` as the
    /// poster frame of `elem`, tied to pending image cache entry `id`.
    fn new(elem: &HTMLVideoElement, url: ServoUrl, id: PendingImageId) -> PosterFrameFetchContext {
        let window = window_from_node(elem);
        PosterFrameFetchContext {
            image_cache: window.image_cache(),
            elem: Trusted::new(elem),
            id,
            cancelled: false,
            resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
            url,
        }
    }
}
<|file_name|>view_container_ref.ts<|end_file_name|><|fim▁begin|>import {ListWrapper, List} from 'angular2/src/core/facade/collection';
import {ResolvedBinding} from 'angular2/di';
import {isPresent, isBlank} from 'angular2/src/core/facade/lang';
import * as avmModule from './view_manager';
import * as viewModule from './view';
import {ElementRef} from './element_ref';
import {TemplateRef} from './template_ref';
import {ViewRef, HostViewRef, ProtoViewRef, internalView} from './view_ref';
/**
* A location where {@link ViewRef}s can be attached.
*
* A `ViewContainerRef` represents a location in a {@link ViewRef} where other child
* {@link ViewRef}s can be inserted. Adding and removing views is the only way of structurally
* changing the rendered DOM of the application.
*/
export class ViewContainerRef {
/**
* @private
*/
constructor(public viewManager: avmModule.AppViewManager, public element: ElementRef) {}
private _getViews(): List<viewModule.AppView> {
var vc = internalView(this.element.parentView).viewContainers[this.element.boundElementIndex];
return isPresent(vc) ? vc.views : [];
}
/**
* Remove all {@link ViewRef}s at current location.
*/
clear(): void {
for (var i = this.length - 1; i >= 0; i--) {
this.remove(i);
}
}
<|fim▁hole|> */
get(index: number): ViewRef { return this._getViews()[index].ref; }
/**
* Returns number of {@link ViewRef}s currently attached at this location.
*/
get length(): number { return this._getViews().length; }
/**
* Create and insert a {@link ViewRef} into the view-container.
*
* - `protoViewRef` (optional) {@link ProtoViewRef} - The `ProtoView` to use for creating
* `View` to be inserted at this location. If `ViewContainer` is created at a location
* of inline template, then `protoViewRef` is the `ProtoView` of the template.
* - `atIndex` (optional) `number` - location of insertion point. (Or at the end if unspecified.)
* - `context` (optional) {@link ElementRef} - Context (for expression evaluation) from the
* {@link ElementRef} location. (Or current context if unspecified.)
* - `bindings` (optional) Array of {@link ResolvedBinding} - Used for configuring
* `ElementInjector`.
*
* Returns newly created {@link ViewRef}.
*/
// TODO(rado): profile and decide whether bounds checks should be added
// to the methods below.
createEmbeddedView(templateRef: TemplateRef, atIndex: number = -1): ViewRef {
if (atIndex == -1) atIndex = this.length;
return this.viewManager.createEmbeddedViewInContainer(this.element, atIndex, templateRef);
}
createHostView(protoViewRef: ProtoViewRef = null, atIndex: number = -1,
dynamicallyCreatedBindings: ResolvedBinding[] = null): HostViewRef {
if (atIndex == -1) atIndex = this.length;
return this.viewManager.createHostViewInContainer(this.element, atIndex, protoViewRef,
dynamicallyCreatedBindings);
}
/**
* Insert a {@link ViewRef} at specefic index.
*
* The index is location at which the {@link ViewRef} should be attached. If omitted it is
* inserted at the end.
*
* Returns the inserted {@link ViewRef}.
*/
insert(viewRef: ViewRef, atIndex: number = -1): ViewRef {
if (atIndex == -1) atIndex = this.length;
return this.viewManager.attachViewInContainer(this.element, atIndex, viewRef);
}
/**
* Return the index of already inserted {@link ViewRef}.
*/
indexOf(viewRef: ViewRef): number {
return ListWrapper.indexOf(this._getViews(), internalView(viewRef));
}
/**
* Remove a {@link ViewRef} at specific index.
*
* If the index is omitted last {@link ViewRef} is removed.
*/
remove(atIndex: number = -1): void {
if (atIndex == -1) atIndex = this.length - 1;
this.viewManager.destroyViewInContainer(this.element, atIndex);
// view is intentionally not returned to the client.
}
/**
* The method can be used together with insert to implement a view move, i.e.
* moving the dom nodes while the directives in the view stay intact.
*/
detach(atIndex: number = -1): ViewRef {
if (atIndex == -1) atIndex = this.length - 1;
return this.viewManager.detachViewInContainer(this.element, atIndex);
}
}<|fim▁end|> | /**
* Return a {@link ViewRef} at specific index. |
<|file_name|>jquery.equalHeight.js<|end_file_name|><|fim▁begin|>import debounce from 'debounce';
import $ from 'jquery';
// Reducer: buckets elements by their vertical page offset so that only items
// sitting on the same row get their heights equalized together.
const groupElementsByTop = (groups, element) => {
  const key = $(element).offset().top;
  (groups[key] = groups[key] || []).push(element);
  return groups;
};
// Reducer: places every element into a single bucket (key 0) so the whole
// collection is equalized as one group.
const groupElementsByZero = (groups, element) => {
  (groups[0] = groups[0] || []).push(element);
  return groups;
};
// Reset heights to 'auto' so natural content height can be measured before re-applying the max.
const clearHeight = elements => $(elements).css('height', 'auto');
// Current rendered height of a single element (jQuery .height()).
const getHeight = element => $(element).height();
// Sets every element in the group to the tallest member's height.
// (Modernized: spread syntax instead of Math.max.apply — the file already uses ES modules/arrows.)
const applyMaxHeight = (elements) => {
  const maxHeight = Math.max(...elements.map(getHeight));
  $(elements).height(maxHeight);
};
// Groups the elements (per-row when groupByTop, otherwise all together),
// resets each group's heights, then applies the group's max height.
// (Reconstructed: FIM residue had displaced the two forEach calls to the file tail.)
const equalizeHeights = (elements, groupByTop) => {
  // Sort into groups.
  const groups = groupByTop ?
    elements.reduce(groupElementsByTop, {}) :
    elements.reduce(groupElementsByZero, {});
  // Convert to arrays.
  const groupsAsArray = Object.keys(groups).map((key) => groups[key]);
  // Apply max height.
  groupsAsArray.forEach(clearHeight);
  groupsAsArray.forEach(applyMaxHeight);
};
/**
 * jQuery plugin: keeps the matched elements' heights equal, re-running on
 * window resize (debounced) and optionally on DOM ready / window load.
 * (Stray FIM residue statements after the plugin body removed.)
 */
$.fn.equalHeight = function ({
  groupByTop = false,
  resizeTimeout = 20,
  updateOnDOMReady = true,
  updateOnDOMLoad = false
} = {}) {
  // Convert to native array.
  const elements = this.toArray();
  // Re-equalize on resize; debounce coalesces rapid resize events.
  $(window).on('resize', debounce(() => {
    equalizeHeights(elements, groupByTop);
  }, resizeTimeout));
  // Handle load event.
  $(window).on('load', () => {
    if (updateOnDOMLoad) {
      equalizeHeights(elements, groupByTop);
    }
  });
  // NOTE(review): $(document).on('ready', ...) is ignored by jQuery 3+;
  // verify the project pins jQuery < 3 or switch to $(fn).
  $(document).on('ready', () => {
    if (updateOnDOMReady) {
      equalizeHeights(elements, groupByTop);
    }
  });
  return this;
};
<|file_name|>ttree_lex.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
# Author: Andrew Jewett (jewett.aij at g mail)
# http://www.chem.ucsb.edu/~sheagroup
# License: 3-clause BSD License (See LICENSE.TXT)
# Copyright (c) 2012, Regents of the University of California
# All rights reserved.
"""A lexical analyzer class for simple shell-like syntaxes.
This version has been modified slightly to work better with unicode.
It was forked from the version of shlex that ships with python 3.2.2.
A few minor features and functions have been added. """
# Module and documentation by Eric S. Raymond, 21 Dec 1998
# Input stacking and error message cleanup added by ESR, March 2000
# push_source() and pop_source() made explicit by ESR, January 2001.
# Posix compliance, split(), string arguments, and
# iterator interface by Gustavo Niemeyer, April 2003.
# ("wordterminators" (unicode support) hack by Andrew Jewett September 2011)
import os.path
import sys
from collections import deque
import re, fnmatch
import string
#import gc
try:
from cStringIO import StringIO
except ImportError:
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
__all__ = ["TtreeShlex",
"split",
"LineLex",
"SplitQuotedString",
"EscCharStrToChar",
"SafelyEncodeString",
"RemoveOuterQuotes",
"MaxLenStr",
"HasWildCard",
#"IsRegex",
"InputError",
"ErrorLeader",
"SrcLoc",
"OSrcLoc",
"TextBlock",
"VarRef",
"VarNPtr",
"VarBinding",
"SplitTemplate",
"SplitTemplateMulti",
"TableFromTemplate",
"ExtractCatName",
#"_TableFromTemplate",
#"_DeleteLineFromTemplate",
"DeleteLinesWithBadVars",
"TemplateLexer"]
class TtreeShlex(object):
    """ A lexical analyzer class for simple shell-like syntaxes.
    TtreeShlex is a backwards-compatible version of python's standard shlex
    module. It has the additional member: "self.wordterminators", which
    overrides the "self.wordchars" member. This enables better handling of
    unicode characters by allowing a much larger variety of characters to
    appear in words or tokens parsed by TtreeShlex.
    """
    # Optional user-supplied include path (colon-delimited string).  When set,
    # it is searched (before $TTREE_PATH) when "source"-ing other files.
    custom_path = None
    def __init__(self,
                 instream=None,
                 infile=None,
                 custom_include_path=None,
                 posix=False):
        # Accept either a ready-made stream or a raw string as input.
        if isinstance(instream, str):
            instream = StringIO(instream)
        if instream is not None:
            self.instream = instream
            self.infile = infile
        else:
            self.instream = sys.stdin
            self.infile = None
        self.posix = posix
        # In posix mode EOF is reported as None; otherwise as ''.
        if posix:
            self.eof = None
        else:
            self.eof = ''
        self.commenters = '#'
        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
        if self.posix:
            self.wordchars += ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
                               'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
        # When non-empty, wordterminators REPLACES the wordchars test:
        # any character NOT in wordterminators is treated as a word char.
        self.wordterminators = set([])  #WORDTERMINATORS
        # Remembers the whitespace (and possibly comment text) which
        # terminated the most recently read token (used by push_token()).
        self.prev_space_terminator = ''  #WORDTERMINATORS
        self.whitespace = ' \t\r\f\n'
        self.whitespace_split = False
        self.quotes = '\'"'
        self.escape = '\\'
        self.escapedquotes = '"'
        self.state = ' '
        self.pushback = deque()
        self.lineno = 1
        self.debug = 0
        self.token = ''
        self.filestack = deque()
        # self.source_triggers
        # are tokens which allow the seamless insertion of other
        # files into the file being read.
        self.source_triggers=set(['source'])
        self.source_triggers_x=set([])
        #Note: self.source_triggers_x
        #      This is a subset of self.source_triggers.
        #      In this case file inclusion is exclusive.
        #      In other words, if one of these tokens
        #      is encountered, the file is only included
        #      if it has not been included already.
        self.source_files_restricted = set([])
        self.include_path = []
        if TtreeShlex.custom_path:
            include_path_list = TtreeShlex.custom_path.split(':')
            self.include_path += [d for d in include_path_list if len(d)>0]
        if 'TTREE_PATH' in os.environ:
            include_path_list = os.environ['TTREE_PATH'].split(':')
            self.include_path += [d for d in include_path_list if len(d)>0]
        if self.debug:
            print('TtreeShlex: reading from %s, line %d' \
                  % (self.instream, self.lineno))
        self.end_encountered = False
    @staticmethod  #WORDTERMINATORS
    def _belongs_to(char, include_chars, exclude_chars):  #WORDTERMINATORS
        """Return True if char counts as a word character.  When
        exclude_chars is empty, membership in include_chars decides;
        otherwise ANY character not in exclude_chars qualifies."""
        if ((not exclude_chars) or (len(exclude_chars)==0)):  #WORDTERMINATORS
            return char in include_chars  #WORDTERMINATORS
        else:  #WORDTERMINATORS
            return char not in exclude_chars  #WORDTERMINATORS
    def push_raw_text(self, text):
        """Push a block of text onto the stack popped by the ReadLine() method.
        (If multiple lines are present in the text, (which is determined by
        self.line_terminators), then the text is split into multiple lines
        and each one of them is pushed onto this stack individually.
        The "self.lineno" counter is also adjusted, depending on the number
        of newline characters in "line".
            Do not strip off the newline, or other line terminators
            at the end of the text block before using push_raw_text()!
        """
        if self.debug >= 1:
            print("TtreeShlex: pushing token " + repr(text))
        # Push characters in reverse so they pop off in original order.
        for c in reversed(text):  #WORDTERMINATORS
            self.pushback.appendleft(c)  #WORDTERMINATORS
            if c == '\n':  #WORDTERMINATORS
                self.lineno -= 1  #WORDTERMINATORS
        if len(text) > 0:  #WORDTERMINATORS
            self.end_encountered = False  #WORDTERMINATORS
    def push_token(self, text):
        "Push a token onto the stack popped by the get_token method"
        # Re-attach the whitespace which originally terminated the token.
        self.push_raw_text(text+self.prev_space_terminator)
    def push_source(self, newstream, newfile=None):
        "Push an input source onto the lexer's input source stack."
        if isinstance(newstream, str):
            newstream = StringIO(newstream)
        self.filestack.appendleft((self.infile, self.instream, self.lineno))
        self.infile = newfile
        self.instream = newstream
        self.lineno = 1
        if self.debug:
            if newfile is not None:
                print('TtreeShlex: pushing to file %s' % (self.infile,))
            else:
                print('TtreeShlex: pushing to stream %s' % (self.instream,))
    def pop_source(self):
        "Pop the input source stack."
        self.instream.close()
        (self.infile, self.instream, self.lineno) = self.filestack.popleft()
        if self.debug:
            print('TtreeShlex: popping to %s, line %d' \
                  % (self.instream, self.lineno))
        self.state = ' '
    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)"
        #### #CHANGING: self.pushback is now a stack of characters, not tokens #WORDTERMINATORS
        #### if self.pushback:                                   #WORDTERMINATORS
        ####    tok = self.pushback.popleft()                    #WORDTERMINATORS
        ####    if self.debug >= 1:                              #WORDTERMINATORS
        ####        print("TtreeShlex: popping token " + repr(tok)) #WORDTERMINATORS
        ####    return tok                                       #WORDTERMINATORS
        #### No pushback.  Get a token.                          #WORDTERMINATORS
        raw = self.read_token()
        # Handle inclusions
        if self.source_triggers is not None:
            while raw in self.source_triggers:
                fname=self.read_token()
                spec = self.sourcehook(fname)
                if spec:
                    (newfile, newstream) = spec
                    if ((raw not in self.source_triggers_x) or
                        (newfile not in self.source_files_restricted)):
                        self.push_source(newstream, newfile)
                        if raw in self.source_triggers_x:
                            self.source_files_restricted.add(newfile)
                    else:
                        if self.debug >= 0:
                            sys.stderr.write('\ndebug warning: duplicate attempt to import file:\n \"'+newfile+'\"\n')
                raw = self.get_token()
        # Maybe we got EOF instead?
        while raw == self.eof:
            if not self.filestack:
                return self.eof
            else:
                self.pop_source()
                raw = self.get_token()
        # Neither inclusion nor EOF
        if self.debug >= 1:
            if raw != self.eof:
                print("TtreeShlex: token=" + repr(raw))
            else:
                print("TtreeShlex: token=EOF")
        if raw == self.eof:  #WORDTERMINATORS
            self.end_encountered = True  #WORDTERMINATORS
        return raw
    def read_char(self):
        """Return the next input character, preferring the pushback stack."""
        if self.pushback:  #WORDTERMINATORS
            nextchar = self.pushback.popleft()  #WORDTERMINATORS
            assert((type(nextchar) is str) and (len(nextchar)==1))  #WORDTERMINATORS
        else:  #WORDTERMINATORS
            nextchar = self.instream.read(1)  #WORDTERMINATORS
        return nextchar
    def read_token(self):
        """Read and return one raw token (no "source" inclusion handling;
        see get_token()).  Implemented as a small character-driven state
        machine: state ' ' = skipping whitespace, 'a' = inside a word,
        a quote character = inside a quoted string, the escape character
        = just consumed an escape, None = past end of file."""
        self.prev_space_terminator = ''  #WORDTERMINATORS
        quoted = False
        escapedstate = ' '
        while True:
            #### self.pushback is now a stack of characters, not tokens #WORDTERMINATORS
            nextchar = self.read_char()
            if nextchar == '\n':
                self.lineno = self.lineno + 1
            if self.debug >= 3:
                print("TtreeShlex: in state", repr(self.state), \
                      "I see character:", repr(nextchar))
            if self.state is None:
                self.token = ''        # past end of file
                break
            elif self.state == ' ':
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("TtreeShlex: I see whitespace in whitespace state")
                    if self.token or (self.posix and quoted):
                        # Keep track of which whitespace
                        # character terminated the token.
                        self.prev_space_terminator = nextchar  #WORDTERMINATORS
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif TtreeShlex._belongs_to(nextchar,  #WORDTERMINATORS
                                            self.wordchars,  #WORDTERMINATORS
                                            self.wordterminators):  #WORDTERMINATORS
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.quotes:
                    if not self.posix:
                        self.token = nextchar
                    self.state = nextchar
                elif self.whitespace_split:
                    self.token = nextchar
                    self.state = 'a'
                else:
                    self.token = nextchar
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                quoted = True
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print("TtreeShlex: I see EOF in quotes state")
                    # XXX what error should be raised here?
                    raise ValueError("Error at or before "+self.error_leader()+"\n"
                                     " No closing quotation.")
                if nextchar == self.state:
                    if not self.posix:
                        self.token = self.token + nextchar
                        self.state = ' '
                        break
                    else:
                        self.state = 'a'
                elif self.posix and nextchar in self.escape and \
                     self.state in self.escapedquotes:
                    escapedstate = self.state
                    self.state = nextchar
                else:
                    self.token = self.token + nextchar
            elif self.state in self.escape:
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print("TtreeShlex: I see EOF in escape state")
                    # XXX what error should be raised here?
                    raise ValueError("No escaped character")
                # In posix shells, only the quote itself or the escape
                # character may be escaped within quotes.
                if escapedstate in self.quotes and \
                   nextchar != self.state and nextchar != escapedstate:
                    self.token = self.token + self.state
                self.token = self.token + nextchar
                self.state = escapedstate
            elif self.state == 'a':
                if not nextchar:
                    self.state = None   # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print("TtreeShlex: I see whitespace in word state")
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        # Keep track of which whitespace
                        # character terminated the token.
                        self.prev_space_terminator = nextchar  #WORDTERMINATORS
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    comment_contents = self.instream.readline()
                    self.lineno = self.lineno + 1
                    if self.posix:
                        self.state = ' '
                        if self.token or (self.posix and quoted):
                            # Keep track of which character(s) terminated
                            # the token (including whitespace and comments).
                            self.prev_space_terminator = nextchar + comment_contents  #WORDTERMINATORS
                            break   # emit current token
                        else:
                            continue
                elif self.posix and nextchar in self.quotes:
                    self.state = nextchar
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif (TtreeShlex._belongs_to(nextchar,  #WORDTERMINATORS
                                             self.wordchars,  #WORDTERMINATORS
                                             self.wordterminators)  #WORDTERMINATORS
                      or (nextchar in self.quotes)  #WORDTERMINATORS
                      or (self.whitespace_split)):  #WORDTERMINATORS
                    self.token = self.token + nextchar
                else:
                    self.pushback.appendleft(nextchar)
                    if self.debug >= 2:
                        print("TtreeShlex: I see punctuation in word state")
                    self.state = ' '
                    if self.token:
                        break   # emit current token
                    else:
                        continue
        result = self.token
        self.token = ''
        if self.posix and not quoted and result == '':
            result = None
        if self.debug > 1:
            if result:
                print("TtreeShlex: raw token=" + repr(result))
            else:
                print("TtreeShlex: raw token=EOF")
        return result
    def sourcehook(self, newfile):
        "Hook called on a filename to be sourced."
        newfile = RemoveOuterQuotes(newfile)
        # This implements cpp-like semantics for relative-path inclusion.
        # NOTE(review): if self.infile is not a string (or newfile is an
        # absolute path), newfile_full is never assigned before the open()
        # below -- presumably those cases never occur in practice; verify.
        if isinstance(self.infile, str) and not os.path.isabs(newfile):
            newfile_full = os.path.join(os.path.dirname(self.infile), newfile)
        try:
            f = open(newfile_full, "r")
        except IOError:
            # If not found,
            err = True
            # ...then check to see if the file is in one of the
            # directories in the self.include_path list.
            for d in self.include_path:
                newfile_full = os.path.join(d, newfile)
                try:
                    f = open(newfile_full, "r")
                    err = False
                    break
                except IOError:
                    err=True
            if err:
                raise InputError('Error at '+self.error_leader()+'\n'
                                 ' unable to open file \"'+newfile+'\"\n'
                                 ' for reading.\n')
        return (newfile, f)
    def error_leader(self, infile=None, lineno=None):
        "Emit a C-compiler-like, Emacs-friendly error-message leader."
        if infile is None:
            infile = self.infile
        if lineno is None:
            lineno = self.lineno
        return "\"%s\", line %d: " % (infile, lineno)
    def __iter__(self):
        return self
    def __next__(self):
        token = self.get_token()
        if token == self.eof:
            raise StopIteration
        return token
    def __bool__(self):
        # Truthy until EOF has been reached.
        return not self.end_encountered
    # For compatibility with python 2.x, I must also define:
    def __nonzero__(self):
        return self.__bool__()
# The split() function was originally from shlex
# It is included for backwards compatibility.
def split(s, comments=False, posix=True):
    """Split the string s into a list of shell-like tokens.

    (Kept for backwards compatibility with the standard shlex module.)
    """
    lexer = TtreeShlex(s, posix=posix)
    lexer.whitespace_split = True
    if not comments:
        lexer.commenters = ''
    return list(lexer)
##################### NEW ADDITIONS (may be removed later) #################
#"""
# -- linelex.py --
#linelex.py defines the LineLex class, which inherits from, and further
#augments the capabilities of TtreeShlex by making it easier to parse
#individual lines one at a time. (The original shlex's "source" inclusion
#ability still works when reading entire lines, and lines are still counted.)
#
#"""
#import sys
class InputError(Exception):
    """Exception carrying a single human-readable error string.

    Raising it signals that the caller supplied a faulty input file or
    argument (as opposed to an internal program error).
    """

    def __init__(self, err_msg):
        # Deliberately does not forward to Exception.__init__(); the
        # message lives in .err_msg (BaseException.__new__ still records
        # it in .args).
        self.err_msg = err_msg

    def __str__(self):
        return self.err_msg

    def __repr__(self):
        return self.__str__()
def ErrorLeader(infile, lineno):
    """Format a '"filename", line N' prefix for error messages."""
    return '"{0}", line {1}'.format(infile, lineno)
class SrcLoc(object):
    """A minimal (filename, line-number) pair recording where a piece
    of text originated in the input."""

    def __init__(self, infile='', lineno=-1):
        self.infile = infile
        self.lineno = lineno
def SplitQuotedString(string,
                      quotes='\'\"',
                      delimiters=' \t\r\f\n',
                      escape='\\',
                      comment_char='#'):
    """Split string into tokens at delimiter characters, honoring quotes,
    backslash escapes, and an (unquoted, unescaped) comment character.

    Quote characters are kept in the tokens, and escaped n/t/r/f are
    translated into their control-character equivalents.  Note that for
    any non-empty input a final token is always appended, so a trailing
    delimiter yields a trailing empty token (historical behavior,
    preserved intentionally).
    """
    results = []
    current = ''
    in_token = True
    pending_escape = False
    open_quote = None
    for ch in string:
        unprotected = (not pending_escape) and (open_quote is None)
        if unprotected and (ch in comment_char):
            # An unquoted comment character ends the scan immediately.
            results.append(current)
            return results
        if unprotected and (ch in delimiters):
            if in_token:
                results.append(current)
                current = ''
            in_token = False
        elif ch in escape:
            if pending_escape:
                # An escaped escape character: keep one copy.
                current += ch
                in_token = True
                pending_escape = False
            else:
                # Swallow the escape character itself.
                pending_escape = True
        elif (ch in quotes) and (not pending_escape):
            if open_quote is not None:
                if ch == open_quote:
                    open_quote = None
            else:
                open_quote = ch
            current += ch
            in_token = True
        else:
            if pending_escape:
                if ch == 'n':
                    ch = '\n'
                elif ch == 't':
                    ch = '\t'
                elif ch == 'r':
                    ch = '\r'
                elif ch == 'f':
                    ch = '\f'
            current += ch
            in_token = True
            pending_escape = False
    if len(string) > 0:
        results.append(current)
    return results
def EscCharStrToChar(s_in, escape='\\'):
    """
    Replace two-character escape sequences in s_in ('\\n', '\\t', '\\r',
    '\\f', escaped quotes, escaped escapes) with the single characters
    they denote.  Unrecognized escape sequences are left intact (both
    characters are kept).
    """
    assert(len(escape) > 0)
    known = {'n': '\n', 't': '\t', 'r': '\r', 'f': '\f',
             '\'': '\'', '\"': '\"'}
    pieces = []
    pending = False
    for c in s_in:
        if pending:
            if c in known:
                pieces.append(known[c])
            elif c in escape:
                pieces.append(c)
            else:
                pieces.append(escape + c)  # <- keep both characters
            pending = False
        elif c in escape:
            pending = True
        else:
            pieces.append(c)
    return ''.join(pieces)
def SafelyEncodeString(in_str,
                       quotes='\'\"',
                       delimiters=' \t\r\f\n',
                       escape='\\',
                       comment_char='#'):
    """
    Return a version of in_str in which problematic characters
    (newlines, tabs, quotes, the escape character itself) are replaced
    by two-character backslashed escape sequences (like '\\n', '\\t',
    '\\'', '\\"').  If the string contains any delimiter characters the
    whole result is wrapped in double quotes.  (The escape argument can
    be overridden, but the encodings of '\\n', '\\t', '\\r', '\\f'
    always use a literal backslash.)
    """
    assert(len(escape) > 0)
    ctrl = {'\n': '\\n', '\t': '\\t', '\r': '\\r', '\f': '\\f'}
    encoded = []
    needs_quoting = False
    for c in in_str:
        if c in ctrl:
            c = ctrl[c]
        elif c in quotes:
            c = escape[0] + c
        elif c in escape:
            c = c + c
        elif c in delimiters:
            needs_quoting = True
        encoded.append(c)
    if needs_quoting:
        return '\"' + ''.join(encoded) + '\"'
    return ''.join(encoded)
def RemoveOuterQuotes(text, quotes='\"\''):
    """Strip one pair of matching surrounding quote characters from
    text, if present; otherwise return text unchanged."""
    if (len(text) >= 2) and (text[0] in quotes) and (text[-1] == text[0]):
        return text[1:-1]
    return text
def MaxLenStr(s1, s2):
    """Return the longer of the two strings (s1 wins ties)."""
    return s2 if len(s2) > len(s1) else s1
#def IsRegex(pat):
# """
# Check to see if string (pat) is bracketed by slashes.
#
# """
# return (len(pat)>=2) and (pat[0]=='/') and (pat[-1] == '/')
def HasWildCard(pat):
    """
    Return True if the string pat contains a '*' or '?' character.
    """
    return ('*' in pat) or ('?' in pat)
#def HasWildCard(pat):
# """
# Returns true if a string (pat) contains a non-backslash-protected
# * or ? character.
#
# """
# N=len(pat)
# i=0
# while i < N:
# i = pat.find('*', i, N)
# if i == -1:
# break
# elif (i==0) or (pat[i-1] != '\\'):
# return True
# i += 1
# i=0
# while i < N:
# i = pat.find('?', i, N)
# if i == -1:
# break
# elif (i==0) or (pat[i-1] != '\\'):
# return True
# i += 1
# return False
def MatchesPattern(s, pattern):
    """Return True if the string s matches pattern.

    pattern may be either:
      * a string -- matched with glob-style wildcards (via
        fnmatch.fnmatchcase) when it contains '*' or '?', otherwise
        compared for exact equality; or
      * a precompiled regular expression object -- matched using
        pattern.search().
    """
    if type(pattern) is str:
        if ('*' in pattern) or ('?' in pattern):
            # Glob-style (case-sensitive) wildcard match.
            if not fnmatch.fnmatchcase(s, pattern):
                return False
        elif s != pattern:
            return False
    else:
        # Assume pattern was built with re.compile(some_reg_expr).
        if not pattern.search(s):
            return False
    return True
def MatchesAll(multi_string, pattern):
    """Return True only if every entry of multi_string matches the
    corresponding entry of pattern (see MatchesPattern).  The two
    sequences must have equal length."""
    assert(len(multi_string) == len(pattern))
    for s, p in zip(multi_string, pattern):
        if not MatchesPattern(s, p):
            return False
    return True
class LineLex(TtreeShlex):
    """ This class extends the TtreeShlex module (a slightly modified
    version of the python 3.2.2 version of shlex).  LineLex has the
    ability to read one line at a time (in addition to one token at a time).
    (Many files and scripts must be parsed one line at a time instead of one
    token at a time.  In these cases, the whitespace position also matters.)
    Arguably, this class might not be necessary.
    I could get rid of this class completely.  That would be nice.  To do that
    we would need to augment and generalize shlex's get_token() member function
    to make it read lines, not just tokens.  Of course, you can always
    change the wordchars (or wordterminators).  Even so, there are two other
    difficulties using the current version of shlex.get_token() to read lines:
    1) File inclusion happen whenever the beginning of a line/token matches one
    of the "source_triggers" (not the whole line as required by get_token()).
    2) Lines ending in a special character (by default the backslash character)
    continue on to the next line.
    This code seems to work on our test files, but I'm sure there are bugs.
    """
    def __init__(self,
                 instream=None,
                 infile=None,
                 posix=False):
        TtreeShlex.__init__(self, instream, infile, posix)
        self.line_terminators = '\n'        # characters which end a line
        self.line_extend_chars = '\\'       # trailing chars which join lines
        self.skip_comments_during_readline = True
    def _StripComments(self, line):
        """Return line truncated at the first unescaped comment character
        (only when self.skip_comments_during_readline is enabled)."""
        if self.skip_comments_during_readline:
            for i in range(0, len(line)):
                if ((line[i] in self.commenters) and
                    ((i==0) or (line[i-1] not in self.escape))):
                    return line[:i]
        return line
    def _ReadLine(self,
                  recur_level=0):
        """
        This function retrieves a block of text, halting at a
        terminal character.  Escape sequences are respected.
        The self.lineno (newline counter) is also maintained.
        The main difference between Readline and get_token()
        is the way they handle the "self.source_triggers" member.
        Both Readline() and get_token() insert text from other files when they
        encounter a string in "self.source_triggers" in the text they read.
        However ReadLine() ONLY inserts text from other files if the token which
        matches with self.source_triggers appears at the beginning of the line.
        get_token() inserts text only if lex.source matches the entire token.
        comment-to-self:
        At some point, once I'm sure this code is working, I should replace
        shlex.get_token() with the code from ReadLine() which is more general.
        It would be nice to get rid of "class LineLex" entirely.  ReadLine()
        is the only new feature that LineLex which was lacking in shlex.
        To do this I would need to add a couple optional arguments to
        "get_token()", allowing it to mimic ReadLine(), such as:
        "override_wordterms" argument (which we can pass a '\n'), and
        "token_extender" argument (like '\' for extending lines)
        """
        first_token=''
        line = ''
        escaped_state = False
        found_space = False
        while True:
            nextchar = self.read_char()
            #sys.stderr.write('nextchar=\"'+nextchar+'\"\n')
            # On EOF, pop back to the including file (if any); otherwise
            # return what we have so far.
            while nextchar == '':
                if not self.filestack:
                    return self._StripComments(line), '', first_token, found_space
                else:
                    self.pop_source()
                    nextchar = self.read_char()
            if nextchar == '\n':
                self.lineno += 1
            # An escape character protects exactly one following character.
            if escaped_state:
                escaped_state = False
            else:
                if nextchar in self.escape:
                    line += nextchar
                    escaped_state = True
                else:
                    escaped_state = False
            if not escaped_state:
                if (nextchar in self.whitespace):
                    found_space = True
                    # If the line's first token is a "source" trigger,
                    # read the named file in place of this line.
                    while first_token in self.source_triggers:
                        fname = RemoveOuterQuotes(self.get_token())
                        if (fname == '') or (fname in self.source_triggers):
                            raise InputError('Error: near '+self.error_leader()+'\n'
                                             ' Nonsensical file inclusion request.\n')
                        if self.debug >= 0:
                            sys.stderr.write( (' ' * recur_level) +
                                             'reading file \"'+fname+'\"\n')
                        spec = self.sourcehook(fname)
                        if spec:
                            (fname, subfile) = spec
                            if ((first_token not in self.source_triggers_x) or
                                (fname not in self.source_files_restricted)):
                                self.push_source(subfile, fname)
                                if first_token in self.source_triggers_x:
                                    self.source_files_restricted.add(fname)
                            else:
                                if self.debug >= 0:
                                    sys.stderr.write('\nWarning at '+self.error_leader()+':\n'
                                                     ' duplicate attempt to import file:\n \"'+fname+'\"\n')
                        line, nextchar, first_token, found_space = \
                            self._ReadLine(recur_level+1)
                if nextchar in self.line_terminators:
                    line_nrw = line.rstrip(self.whitespace)
                    #sys.stderr.write('line_nrw=\"'+line_nrw+'\"\n')
                    # A trailing (unescaped) line-extension character joins
                    # this line with the next one.
                    if ((len(line_nrw) > 0) and
                        (line_nrw[-1] in self.line_extend_chars) and
                        ((len(line_nrw) < 2) or (line_nrw[-2] not in self.escape))):
                        line = line_nrw[:-1] #delete the line_extend character
                        # from the end of that line and keep reading...
                    else:
                        return self._StripComments(line), nextchar, first_token, found_space
                else:
                    line += nextchar
                    if not found_space:
                        first_token += nextchar
    def ReadLine(self, recur_level=0):
        """Return the next (comment-stripped) line, including its
        terminating character; sets self.end_encountered at EOF."""
        line, nextchar, first_token, found_space = \
            self._ReadLine(recur_level)
        if nextchar == self.eof:
            self.end_encountered = True
        return line + nextchar
    @staticmethod
    def TextBlock2Lines(text, delimiters, keep_delim=True):
        """ This splits a string into a list of sub-strings split by delimiter
        characters.  This function is different from the standard str.split()
        function: The string is split at every character which belongs to the
        "delimiters" argument (which can be a string or some other container).
        This character is included at the end of every substring.  Example:
        TextBlock2Lines('\nabc\nde^fg\nhi j\n', '^\n')
        returns:
        ['\n', 'abc\n', 'de^', 'fg\n', 'hi j\n']
        """
        ls = []
        i = 0
        i_prev = 0
        while i < len(text):
            if text[i] in delimiters:
                if keep_delim:
                    ls.append(text[i_prev:i+1])
                else:
                    ls.append(text[i_prev:i])
                i_prev = i+1
            i += 1
        if (i_prev < len(text)):
            ls.append(text[i_prev:i+1])
        return ls
    def __iter__(self):
        return self
    def __next__(self):
        line = self.ReadLine()
        if line == self.eof:
            raise StopIteration
        return line
class OSrcLoc(object):
    """Records the file name and line number at which a block of text
    was read, plus a global creation-order counter.

    The "order" member exists because line numbers alone cannot tell
    whether one block of text was read before another: the blocks may
    come from different files, and several short blocks can occupy the
    same line.  Every OSrcLoc therefore receives the value of a
    class-wide counter incremented at construction time, and comparing
    two OSrcLocs compares these counters.  (This is useful when the
    user requests that variables and commands be assigned in a
    non-standard order; most of the time "order" can be ignored.)
    """
    count = 0  # class-wide tally of OSrcLoc objects created so far

    def __init__(self, infile='', lineno=-1):
        self.infile = infile
        self.lineno = lineno
        OSrcLoc.count += 1
        self.order = OSrcLoc.count

    def __lt__(self, x):
        return self.order < x.order
class TextBlock(object):
    """A chunk of text bundled with the OSrcLoc recording where in the
    original input file it was read."""

    def __init__(self, text, srcloc):
        self.text = text
        # Fall back to a default source location when none was given.
        self.srcloc = OSrcLoc() if (srcloc == None) else srcloc

    def __repr__(self):
        return '\"' + self.text + '\"'
class VarRef(object):
    """One occurrence of a variable inside a template.  Stores the text
    surrounding the descriptor, the descriptor string itself, the OSrcLoc
    where the reference appears, and links to the variable's binding and
    its node pointer (see VarNPtr)."""

    def __init__(self,
                 prefix = '',    # text before the descriptor ('$' or '${')
                 descr_str = '', # descriptor string: "cpath/category:lpath"
                 suffix = '',    # text after the descriptor ('}')
                 srcloc = None,  # OSrcLoc where the reference appears
                 binding = None, # pointer to the tuple storing the value
                 nptr = None):   # VarNPtr (category name/node, leaf node)
        self.prefix = prefix
        self.descr_str = descr_str
        self.suffix = suffix
        self.srcloc = OSrcLoc() if (srcloc == None) else srcloc
        self.binding = binding
        self.nptr = VarNPtr() if (nptr == None) else nptr

    def __lt__(self, x):
        # NOTE(review): "self.order" is never assigned anywhere in this
        # class (self.srcloc.order may have been intended), so invoking
        # __lt__ raises AttributeError.  Behavior preserved as-is.
        return self.order < x.order
class VarNPtr(object):
    """
    Holds the three pieces of information encoded by a variable
    descriptor such as "$atom:CA":
    1) cat_name:  the category name, which essentially indicates the
       variable's type ("atom" in the example above).
    2) cat_node:  the root node of the portion of the tree in which this
       variable's category is meaningful.  (The tree root "/" in the
       example.  If it is the root node, the category is relevant
       everywhere and is not molecule- or class-specific.  Every
       variable has a category node, often not explicitly written by
       the user; it must be inferred/determined.)
    3) leaf_node: the node whose ".name" member matches the variable's
       name ("CA" above).  It is created for this purpose, and its
       position in the tree reflects the variable's intended scope.
       (In a molecule this name might identify a type of atom, an atom
       ID, or a bond type found in that particular molecule.)
    VarNPtr is simply a 3-tuple keeping these pieces of data together.
    """

    def __init__(self, cat_name='', cat_node=None, leaf_node=None):
        self.cat_name = cat_name
        self.cat_node = cat_node
        self.leaf_node = leaf_node
class VarBinding(object):
    """Ties together everything known about one variable:

    full_name -- the canonical name: the full path leading to the
        category node (beginning with '/'), the category name (followed
        by ':'), and the leaf node (including the path from cat_node).
        This triplet identifies the variable uniquely.
    value     -- the data the variable refers to (usually a string).
    refs      -- list of VarRefs mentioning this variable from the
        various templates in the tree.
    (nptr, order and category are additional bookkeeping members.)
    """

    def __init__(self,
                 full_name = '',
                 nptr = None,
                 value = None,
                 refs = None,
                 order = None,
                 category = None):
        self.full_name = full_name
        self.nptr = nptr
        self.value = value
        self.refs = refs
        self.order = order
        self.category = category

    def __lt__(self, x):
        return self.order < x.order

    def __repr__(self):
        return repr((self.full_name, self.value, self.order))
def ExtractCatName(descr_str):
    """ When applied to a VarRef's "descr_str" member,
    this function will extract the "catname" of it's corresponding
    "nptr" member.  This can be useful for error reporting.
    (I use it to insure that the user is using the correct counter
    variable types at various locations in their input files.)

    A descriptor has the general form "cpath/catname:lpath"; the
    category name is the text between the last '/' preceding the first
    ':' (if any) and that ':'.
    (Bug fix: previously, when the descriptor contained a '/' but no
    ':', the leading '/' was erroneously included in the result,
    inconsistent with the ':'-containing case.)
    """
    ib = descr_str.find(':')
    if ib == -1:
        ib = len(descr_str)
    str_before_colon = descr_str[0:ib]
    ia = str_before_colon.rfind('/')
    # When no '/' is present, ia == -1 and [ia+1:] slices from the start.
    return str_before_colon[ia+1:]
def _DeleteLineFromTemplate(tmpl_list,
                            i_entry, # index into tmpl_list
                            newline_delimiter='\n'):
    """ Delete a single line from tmpl_list.
    tmpl_list is an alternating list of VarRefs and TextBlocks.
    To identify the line, the index corresponding to one of the
    entries in the tmpl_list is used. (Usually it is a VarRef)
    The text after the preceding newline, and the text up to the next newline
        (starting from the beginning of the current entry, if a TextBlock)
    is deleted, including any VarRef (variables) located in between.
    It returns the index corresponding to the next
    entry in the list (after deletion).
    """
    # Scan backwards for the TextBlock containing the previous newline,
    # and truncate it so only the text up to (and including) that
    # newline survives.
    i_prev_newline = i_entry
    while i_prev_newline >= 0:
        entry = tmpl_list[i_prev_newline]
        if isinstance(entry, TextBlock):
            i_char_newline = entry.text.rfind(newline_delimiter)
            if i_char_newline != -1: # then newline found
                # Delete the text after this newline
                entry.text = entry.text[:i_char_newline+1]
                break
        i_prev_newline -= 1
    # Scan forwards for the TextBlock containing the next newline, and
    # delete the text up to (and including) that newline.
    first_var = True
    #i_next_newline = i_entry
    i_next_newline = i_prev_newline+1
    while i_next_newline < len(tmpl_list):
        entry = tmpl_list[i_next_newline]
        if isinstance(entry, TextBlock):
            i_char_newline = entry.text.find(newline_delimiter)
            if i_char_newline != -1: # then newline found
                # Delete the text before this newline (including the newline)
                entry.text = entry.text[i_char_newline+1:]
                break
        # Invoke DeleteSelf() on the first variables on this line.  This will
        # insure that it is deleted from the ttree_assignments.txt file.
        elif isinstance(entry, VarRef):
            if first_var:
                entry.nptr.leaf_node.DeleteSelf()
            first_var = False
        i_next_newline += 1
    # Finally remove every entry strictly between the two boundary
    # TextBlocks (the VarRefs and emptied text that made up the line).
    del tmpl_list[i_prev_newline + 1 : i_next_newline]
    return i_prev_newline + 1
def DeleteLinesWithBadVars(tmpl_list,
                           delete_entire_template = False,
                           newline_delimiter = '\n'):
    """
    Loop through the entries in a template,
    an alternating list of TextBlocks and VarRefs (tmpl_list).
    If a VarRef points to a leaf_node which no longer exists
    (ie. no longer in the corresponding category's .bindings list),
    then delete the line it came from from the template (tmpl_list).
    If delete_entire_template is True, the whole template is emptied
    instead (and 0 is returned).

    (Cleanup: removed the unused locals "out_str_list" and
    "var_bindings"; the latter only served a commented-out membership
    check and performed a needless category-dictionary lookup.)
    """
    i = 0
    while i < len(tmpl_list):
        entry = tmpl_list[i]
        if isinstance(entry, VarRef):
            var_ref = entry
            if var_ref.nptr.leaf_node.IsDeleted():
                if delete_entire_template:
                    del tmpl_list[:]
                    return 0
                else:
                    # Remove the entire line containing this variable and
                    # resume scanning at the entry which follows it.
                    i = _DeleteLineFromTemplate(tmpl_list,
                                                i,
                                                newline_delimiter)
            else:
                i += 1
        else:
            i += 1
def SplitTemplate(ltmpl, delim, delete_blanks = False):
    """
    Split a template "ltmpl" into a list of "tokens" (sub-templates)
    using a single delimiter string "delim".

    INPUT arguments:
    "ltmpl" should be an list of TextBlocks and VarRefs.
    "delim" should be a simple string (type str)
    "delete_blanks" should be a boolean True/False value.
         When true, successive occurrences of the delimiter
         should not create blank entries in the output list.

    OUTPUT:
    A list of tokens.
    Each "token" is either a TextBlock, a VarRef,
    or a (flat, 1-dimensional) list containing more than one of these objects.
    The number of "tokens" returned equals the number of times the delimiter
    is encountered in any of the TextBlocks in the "ltmpl" argument, plus one.
    (... Unless "delete_blanks" is set to True.
     Again, in that case, empty entries in this list are deleted.)
    """
    assert(type(delim) is str)
    # Accept a bare TextBlock/VarRef as well as a list of them.
    if not hasattr(ltmpl, '__len__'):
        ltmpl = [ltmpl]

    tokens_lltmpl = []   # the final list of tokens, returned to the caller
    token_ltmpl = []     # the (possibly multi-entry) token currently being built
    i = 0
    while i < len(ltmpl):
        entry = ltmpl[i]
        if isinstance(entry, TextBlock):
            #if hasattr(entry, 'text'):
            # Only TextBlocks can contain the delimiter.  Split this block's
            # text and track line numbers as the pieces are processed.
            prev_src_loc = entry.srcloc
            tokens_str = entry.text.split(delim)
            lineno = entry.srcloc.lineno
            j = 0
            while j < len(tokens_str):
                token_str = tokens_str[j]
                # Every piece except the last one was followed by a delimiter.
                delim_found = False
                if (j < len(tokens_str)-1):
                    delim_found = True
                if token_str == '':
                    if delete_blanks:
                        # A blank piece: flush whatever token was being built
                        # (if any) and skip the blank itself.
                        if delim == '\n':
                            lineno += 1
                        if len(token_ltmpl) > 0:
                            if len(token_ltmpl) == 1:
                                tokens_lltmpl.append(token_ltmpl[0])
                            else:
                                tokens_lltmpl.append(token_ltmpl)
                        del token_ltmpl
                        token_ltmpl = []
                        j += 1
                        continue
                # Stamp this piece with a source location for error messages.
                new_src_loc = OSrcLoc(prev_src_loc.infile, lineno)
                new_src_loc.order = prev_src_loc.order
                for c in token_str:
                    # Reminder to self:  c != delim  (so c!='\n' if delim='\n')
                    # (We keep track of '\n' characters in delimiters above.)
                    if c == '\n':
                        lineno += 1
                new_src_loc.lineno = lineno
                text_block = TextBlock(token_str,
                                       new_src_loc)
                prev_src_loc = new_src_loc
                if len(token_ltmpl) == 0:
                    if delim_found:
                        # A complete single-entry token.
                        tokens_lltmpl.append(text_block)
                        del token_ltmpl
                        token_ltmpl = []
                    else:
                        # Final piece: it may be continued by the next entry
                        # of ltmpl (eg. a VarRef), so keep building.
                        token_ltmpl.append(text_block)
                else:
                    if delim_found:
                        # Terminate the multi-entry token being built.
                        if len(token_str) > 0:
                            token_ltmpl.append(text_block)
                        tokens_lltmpl.append(token_ltmpl)
                        del token_ltmpl
                        token_ltmpl = []
                    else:
                        assert(not delete_blanks)
                        if (isinstance(token_ltmpl[-1], VarRef)
                            and
                            ((j>0)
                             or
                             ((j == len(tokens_str)-1) and
                              (i == len(ltmpl)-1))
                            )):
                            # In that case, this empty token_str corresponds
                            # to a delimiter which was located immediately
                            # after the variable name,
                            # AND
                            #  -there is more text to follow,
                            # OR
                            #  -we are at the end of the template.
                            token_ltmpl.append(text_block)
                            if len(token_ltmpl) == 1:
                                tokens_lltmpl.append(token_ltmpl[0])
                            else:
                                tokens_lltmpl.append(token_ltmpl)
                            del token_ltmpl
                            token_ltmpl = []
                        else:
                            token_ltmpl.append(text_block)
                # The delimiter itself may contain (be) a newline:
                if (delim_found and (delim == '\n')):
                    lineno += 1
                j += 1
        elif isinstance(entry, VarRef):
            #elif hasattr(entry, 'descr_str'):
            lineno = entry.srcloc.lineno
            if ((len(token_ltmpl) == 1) and
                isinstance(token_ltmpl[0], TextBlock) and
                (len(token_ltmpl[0].text) == 0)):
                # special case: if the previous entry was "", then it means
                # the delimeter appeared at the end of the previous text block
                # leading up to this variable. It separates the variable from
                # the previous text block. It is not a text block of length 0.
                token_ltmpl[0] = entry
            else:
                token_ltmpl.append(entry)
        elif entry == None:
            token_ltmpl.append(entry)
        else:
            assert(False)
        i += 1
    # Append left over remains of the last token
    if len(token_ltmpl) == 1:
        tokens_lltmpl.append(token_ltmpl[0])
    elif len(token_ltmpl) > 1:
        tokens_lltmpl.append(token_ltmpl)
    del token_ltmpl
    return tokens_lltmpl
def SplitTemplateMulti(ltmpl, delims, delete_blanks=False):
    """
    Split a template "ltmpl" into a list of sub-templates using
    one or more delimiter strings.

    If multiple delimiter strings are provided, splitting
    begins using the first delimiter string in the list.
    Then each token in the resulting list of templates
    is split using the next delimiter string,
    and so on until we run out of delimiter strings.

    "ltmpl" should be an list of TextBlocks and VarRefs.
    "delims" should be a simple string (type str) or a list of strings.
    "delete_blanks" is either True or False.
            If True, then any blank entries in the resulting list of
            tokens (sub-templates) will be deleted.
    """
    # BUGFIX: a plain string must be detected *before* the generic
    # hasattr(...,'__len__') test, because strings also define __len__.
    # Previously a multi-character delimiter string (eg. '\n\n') was
    # iterated character-by-character instead of being used whole.
    if isinstance(delims, str):
        delim_list = [delims]
    elif hasattr(delims, '__len__'):
        delim_list = delims          # assume a sequence of delimiter strings
    else:
        delim_list = [delims]
    tokens = [ltmpl]
    for delim in delim_list:
        assert(type(delim) is str)
        split_tokens = []
        for t in tokens:
            for st in SplitTemplate(t, delim, delete_blanks):
                # Optionally discard empty sub-templates.
                if (delete_blanks and
                        hasattr(st, '__len__') and
                        (len(st) == 0)):
                    continue
                split_tokens.append(st)
        tokens = split_tokens
    return tokens
def _TableFromTemplate(d, ltmpl, delimiters, delete_blanks):
    """
    Recursive workhorse behind TableFromTemplate() (see its docstring).
    Splits "ltmpl" along delimiters[d], then recursively splits each
    resulting entry along the lower-priority delimiters (d-1 ... 0),
    optionally discarding entries that come back as empty lists.
    """
    output = SplitTemplateMulti(ltmpl, delimiters[d], delete_blanks[d])

    if d > 0:
        kept = []
        for entry in output:
            sub_table = _TableFromTemplate(d - 1,
                                           entry,
                                           delimiters,
                                           delete_blanks)
            # Drop empty lists produced by the recursion when requested.
            if (delete_blanks[d] and
                    hasattr(sub_table, '__len__') and
                    (len(sub_table) == 0)):
                continue
            kept.append(sub_table)
        output = kept

    return output
def TableFromTemplate(ltmpl, delimiters, delete_blanks=True):
    """
    Split a template (a list containing TextBlocks and VarRefs) into a
    multidimensional table, with an arbitrary number of dimensions.

    Arguments:

    ltmpl
        An alternating list of TextBlocks and VarRefs containing
        the contents of this text template.

    delimiters
        A list or tuple of delimiters: one delimiter for each dimension
        of the table, with low-priority delimiters (such as spaces ' ')
        appearing first, and higher-priority delimiters (such as
        newlines '\\n') appearing later on in the list.  The entire
        "ltmpl" is divided into an n-dimensional table: the text is
        first split using the highest-priority delimiter, then each
        entry in the resulting list is split according to the next
        highest-priority delimiter, and so on until all the delimiters
        are used up and an n-dimensional list-of-lists remains.

    delete_blanks
        Either None (default) or an array of booleans matching the size
        of the "delimiters" argument, indicating (per dimension) whether
        blank entries (caused by adjacent delimiters) should be removed.
        This lets the caller, for example, merge whitespace within a
        line without discarding blank lines.

    ---- Details: ----

    1) Multi-character delimiters ARE allowed (like '\\n\\n').

    2) If an entry of "delimiters" is itself a tuple (or list) of strings,
       the text is split according to any of the delimiters in that
       tuple/list.  This allows splitting on multiple kinds of whitespace
       (such as ' ' and '\\t') at once, for example by setting
       delimiters[0] = (' ','\\t').  If, additionally,
       delete_blanks[0] == True, text on a given line is divided without
       regard to the amount of whitespace between fields.

    Detailed example:

       table2D = TableFromTmplList(ltmpl,
                                   delimiters = ((' ','\\t'), '\\n'),
                                   delete_blanks = (True, False))

    This divides text similarly to the "awk" program's defaults: ignoring
    varying whitespace between fields, but NOT ignoring blank lines.

    3) Any text contained in variable-names is ignored.
    """
    # Work on a shallow copy: the recursive helper modifies its argument
    # in place, and we don't want to disturb the caller's "ltmpl".
    table = list(ltmpl)
    deepest = len(delimiters) - 1
    return _TableFromTemplate(deepest, table, delimiters, delete_blanks)
class TemplateLexer(TtreeShlex):
    """ This class extends the standard python lexing module, shlex, adding a
    new member function (ReadTemplate()), which can read in a block of raw text,
    (halting at an (non-escaped) terminal character), and split the text into
    alternating blocks of text and variables.  (As far as this lexer is
    concerned, "variables" are simply tokens preceeded by $ or @ characters,
    and surrounded by optional curly-brackets {}.)
    """

    def __init__(self,
                 instream=None,
                 infile=None,
                 posix=False):
        # Initialize the underlying shlex-style lexer first; the settings
        # below only customize how this subclass tokenizes templates.
        TtreeShlex.__init__(self, instream, infile, posix)
        self.var_delim = '$@'          #characters which can begin a variable name
        self.var_open_paren = '{'      #optional parenthesis surround a variable
        self.var_close_paren = '}'     #optional parenthesis surround a variable
        self.newline = '\n'
        # Inside comments (text after an unescaped '#'), variable delimiters
        # are treated as ordinary text (see ReadTemplate()).
        self.comment_skip_var = '#'
        # Which characters belong in words?
        #
        # We want to allow these characters:
        #     ./$@&%^!*~`-_:;?<>[]()
        # to appear inside the tokens that TtreeShlex.get_token()
        # retrieves (TtreeShlex.get_token() is used to read class
        # names, and instance names, and variable names)
        #
        # settings.lex.wordchars+='./$@&%^!*~`-_+:;?<>[]' #Allow these chars
        #
        # Ommisions:
        # Note: I left out quotes, whitespace, comment chars ('#'), and escape
        # characters ('\\') because they are also dealt with separately.
        # Those characters should not overlap with settings.lex.wordchars.
        #
        # Enabling unicode support requires that we override this choice
        # by specifying "lex.wordterminators" instead of "wordchars".
        #
        # lex.wordterminators should be the (printable) set inverse of lex.wordchars
        # I'm not sure which ascii characters are NOT included in the string above
        # (We need to figure that out, and put them in settings.lex.wordterminators)
        # To figure that out, uncomment the 8 lines below:
        #
        #self.wordterminators=''
        #for i in range(0,256):
        #    c = chr(i)
        #    if c not in self.wordchars:
        #        self.wordterminators += c
        #sys.stderr.write('-------- wordterminators = --------\n')
        #sys.stderr.write(self.wordterminators+'\n')
        #sys.stderr.write('-----------------------------------\n')
        #
        # Here is the result:
        self.wordterminators = '(),={|}' + \
            self.whitespace + \
            self.quotes + \
            self.escape + \
            self.commenters
        # Note:
        # self.whitespace = ' \t\r\f\n'
        # self.quotes = '\'"'
        # self.escape = '\\'
        # self.commenters = '#'
        # Statements beginning with these tokens pull another file into the
        # token stream; "import"-style triggers are read at most once.
        # (Semantics live in the TtreeShlex base class -- TODO confirm.)
        self.source_triggers=set(['include','import'])
        self.source_triggers_x=set(['import'])
    def GetSrcLoc(self):
        """Return an OSrcLoc recording the lexer's current file and line number.

        Used to stamp tokens/text blocks with their origin so error
        messages can point back at the source text.
        """
        return OSrcLoc(self.infile, self.lineno)
    def ReadTemplate(self,
                     simplify_output=False,
                     terminators='}',
                     other_esc_chars='{',
                     keep_terminal_char = True):
        """
        ReadTemplate() reads a block of text (between terminators)
        and divides it into variables (tokens following a '$' or '@' character)
        and raw text.  This is similar to pythons string.Template(),
        however it reads from streams (files), not strings, and it allows use
        of more complicated variable names with multiple variable delimiters
        (eg '$' and '@').
        This readline()-like member function terminates when reaching a
        user-specified terminator character character (second argument),
        or when variable (eg: "$var"$ is encountered).  The result is
        a list of variable-separated text-blocks (stored in the first
        argument).   For example, the string:
        "string with $var1 and $var2 variables.}"  contains:
        "string with ",
        $var1,
        " and ",
        $var2,
        " variables.}"
        This simplifies the final process of rendering
        (substituting text into) the text blocks later on.

        Output:
        This function returns a list of (alternating) blocks of
        text, and variable names.  Each entry in the list is either:
        1) a text block:
              Raw text is copied from the source, verbatim, along with
           some additional data (filename and line numbers), to
           help retroactively identify where the text came from
           (in case a syntax error in the text is discovered later).
           In this case, the list entry is stored as a list
           The format (TextBlock) is similar to:
           [text_string, ((filenameA,lineBegin), (filenameB,lineEnd))],
           where the tuples, (filenameA,lineBegin) and (filenameB,lineEnd)
           denote the source file(s) from which the text was read, and
           line number at the beginning and ending of the text block.
           (This information is useful for generating helpful error
           messages.  Note that the "TtreeShlex" class allows users to
           combine multiple files transparently into one stream using
           the "source" (or "sourcehook()") member.  For this reason, it
           is possible, although unlikely, that the text-block
           we are reading could span multiple different files.)
        2) a variable (for example "$var" or "${var}"):
           In this case, the list entry is stored in the "VarRef" format
           which is essentialy shown below:
           [[var_prefix, var_nptr, var_suffix], (filename,lineno)]
           where var_prefix and var_suffix are strings containing brackets
           and other text enclosing the variable name (and may be empty).

        As an example, we consider a file named "datafile" which
        contains the text containing 2 text blocks and 1 variable:
        "some\n text\n before ${var}. Text after\n".
        ReadTemplate() will read this and return a list with 3 entries:
        [ ['some\n text\n before', (('datafile', 1), ('datafile', 3))],
          [['${', 'var', '}'], ('datafile', 3, 3)],
          ['Text after\n', (('datafile', 3), ('datafile', 4))] ]

        Note that while parsing the text, self.lineno counter is
        incremented whenever a newline character is encountered.
        (Also: Unlike shlex.get_token(), this function does not
        delete commented text, or insert text from other files.)

        Exceptional Cases:
        Terminator characters are ignored if they are part of a variable
        reference. (For example, the '}' in "${var}", is used to denote a
        bracketed variable, and does not cause ReadTemplate() to stop reading)
           OR if they are part of a two-character escape sequence
        (for example, '}' in "\}" does not cause terminate parsing).
        In that case, the text is considered normal text.  (However the
        '\' character is also stripped out.  It is also stripped out if it
        preceeds any characters in "other_esc_chars", which is
        the second argument.  Otherwise it is left in the text block.)
        """
        #print('  ReadTemplate('+terminators+') invoked at '+self.error_leader())
        # The main loop of the parser reads only one variable at time.
        # The following variables keep track of where we are in the template.
        reading_var=False  # Are we currently reading in the name of a variable?
        prev_char_delim=False  #True iff we just read a var_delim character like '$'
        escaped_state=False  #True iff we just read a (non-escaped) esc character '\'
        commented_state=False  #True iff we are in a region of text where vars should be ignored
        var_paren_depth=0  # This is non-zero iff we are inside a
                           # bracketed variable's name for example: "${var}"
        var_terminators = self.whitespace + self.newline + self.var_delim + '{}'
        tmpl_list = []  # List of alternating tuples of text_blocks and
                        # variable names (see format comment above)
                        # This list will be returned to the caller.
        #sys.stderr.write('report_progress='+str(report_progress))
        # Origin (file, line) of the text block currently being accumulated:
        prev_filename = self.infile
        prev_lineno = self.lineno
        var_prefix = ''
        var_descr_plist = []   # characters of the variable name read so far
        var_suffix = ''
        text_block_plist = []  # characters of the text block read so far
        done_reading = False
        while not done_reading:
            terminate_text = False
            terminate_var = False
            #delete_prior_escape = False
            nextchar = self.read_char()
            #print('  ReadTemplate() nextchar=\''+nextchar+'\' at '+self.error_leader()+'  esc='+str(escaped_state)+', pvar='+str(prev_char_delim)+', paren='+str(var_paren_depth))
            # Count newlines:
            if nextchar in self.newline:
                commented_state = False
                self.lineno += 1
            elif ((nextchar in self.comment_skip_var) and
                  (not escaped_state)):
                # An unescaped '#' begins a comment: until the next newline,
                # variable delimiters are treated as plain text.
                commented_state = True
            # Check for end-of-file:
            if nextchar == '':
                if escaped_state:
                    raise InputError('Error: in '+self.error_leader()+'\n\n'
                                     'No escaped character.')
                if reading_var:
                    terminate_var = True
                else:
                    terminate_text = True
                done_reading = True
            # --- Now process the character: ---
            # What we do next depends on which "mode" we are in.
            # If we are reading a regular text block (reading_var == False),
            # then we keep appending characters onto the end of "text_block",
            # checking for terminal characters, or variable delimiters.
            # If we are reading a variable name (reading_var == True),
            # then we append characters to the end of "var_descr_plist[]",
            # checking for variable terminator characters, as well as
            # parenthesis (some variables are surrounded by parenthesis).
            elif reading_var:
                if nextchar in terminators:
                    #sys.stdout.write('   ReadTemplate() readmode found terminator.\n')
                    if escaped_state:
                        # In this case, the '\' char was only to prevent terminating
                        # string prematurely, so delete the '\' character.
                        #delete_prior_escape = True
                        if not (nextchar in self.var_close_paren):
                            del var_descr_plist[-1]
                        var_descr_plist.append(nextchar)
                    elif not ((var_paren_depth>0) and (nextchar in self.var_close_paren)):
                        terminate_var = True
                        done_reading = True
                if nextchar in self.var_open_paren:  # eg: nextchar == '{'
                    #sys.stdout.write('   ReadTemplate() readmode found {.\n')
                    if escaped_state:
                        # In this case, the '\' char was only to prevent
                        # interpreting '{' as a variable prefix
                        #delete_prior_escape=True # so delete the '\' character
                        del var_descr_plist[-1]
                        var_descr_plist.append(nextchar)
                    else:
                        # "${var}" is a valid way to refer to a variable
                        if prev_char_delim:
                            var_prefix += nextchar
                            var_paren_depth = 1
                        # "${{var}}" is also a valid way to refer to a variable,
                        # (although strange), but "$va{r}" is not.
                        # Parenthesis (in bracketed variable names) must
                        # immediately follow the '$' character (as in "${var}")
                        elif var_paren_depth > 0:
                            var_paren_depth += 1
                elif nextchar in self.var_close_paren:
                    #sys.stdout.write('   ReadTemplate() readmode found }.\n')
                    if escaped_state:
                        # In this case, the '\' char was only to prevent
                        # interpreting '}' as a variable suffix,
                        #delete_prior_escape=True #so skip the '\' character
                        if (nextchar not in terminators):
                            del var_descr_plist[-1]
                        var_descr_plist.append(nextchar)
                    else:
                        if var_paren_depth > 0:
                            var_paren_depth -= 1
                            if var_paren_depth == 0:
                                var_suffix = nextchar
                                terminate_var = True
                elif nextchar in var_terminators:
                    #sys.stdout.write('   ReadTemplate() readmode found var_terminator \"'+nextchar+'\"\n')
                    if (escaped_state or (var_paren_depth>0)):
                        # In this case, the '\' char was only to prevent
                        # interpreting nextchar as a variable terminator
                        #delete_prior_escape = True # so skip the '\' character
                        # NOTE(review): when var_paren_depth>0 and the previous
                        # character was NOT an escape, this del removes a real
                        # character of the variable name -- confirm intended.
                        del var_descr_plist[-1]
                        var_descr_plist.append(nextchar)
                    else:
                        terminate_var = True
                elif nextchar in self.var_delim:  # such as '$'
                    #sys.stdout.write('   ReadTemplate() readmode found var_delim.\n')
                    if escaped_state:
                        # In this case, the '\' char was only to prevent
                        # interpreting '$' as a new variable name
                        #delete_prior_escape = True # so skip the '\' character
                        del var_descr_plist[-1]
                        var_descr_plist.append(nextchar)
                    else:
                        prev_var_delim = True
                        # NOTE(review): 'prev_var_delim' is assigned here (and
                        # in the terminate_var handler below) but never read;
                        # the flag consulted elsewhere is 'prev_char_delim'.
                        # Then we are processing a new variable name
                        terminate_var = True
                else:
                    var_descr_plist.append(nextchar)
                    prev_char_delim = False
                    # NOTE(review): this reset only runs on the plain-character
                    # branch.  If "${{var}}" is meant to bump var_paren_depth
                    # (see the '{' branch above), the reset may belong after
                    # the whole if/elif chain instead -- the original
                    # indentation was lost; confirm against upstream.
            else:  # begin else clause for "if reading_var:"
                # Then we are reading a text_block
                if nextchar in terminators:
                    if escaped_state:
                        # In this case, the '\' char was only to prevent terminating
                        # string prematurely, so delete the '\' character.
                        #delete_prior_escape = True
                        del text_block_plist[-1]
                        text_block_plist.append(nextchar)
                    elif commented_state:
                        text_block_plist.append(nextchar)
                    else:
                        terminate_text = True
                        done_reading = True
                elif nextchar in self.var_delim:  # such as '$'
                    if escaped_state:
                        # In this case, the '\' char was only to prevent
                        # interpreting '$' as a variable prefix.
                        #delete_prior_escape=True #so delete the '\' character
                        del text_block_plist[-1]
                        text_block_plist.append(nextchar)
                    elif commented_state:
                        text_block_plist.append(nextchar)
                    else:
                        prev_char_delim = True
                        reading_var = True
                        var_paren_depth = 0
                        terminate_text = True
                else:
                    text_block_plist.append(nextchar)
                    #TO DO: use "list_of_chars.join()" instead of '+='
                    prev_char_delim = False  # the previous character was not '$'
            # Now deal with "other_esc_chars"
            #if escaped_state and (nextchar in other_esc_chars):
            if escaped_state and (nextchar in other_esc_chars):
                # Strip the preceding '\' that escaped this character
                # (it was appended two characters ago).
                if reading_var:
                    #sys.stdout.write('   ReadTemplate: var_descr_str=\''+''.join(var_descr_plist)+'\'\n')
                    assert(var_descr_plist[-2] in self.escape)
                    del var_descr_plist[-2]
                else:
                    #sys.stdout.write('   ReadTemplate: text_block=\''+''.join(text_block_plist)+'\'\n')
                    assert(text_block_plist[-2] in self.escape)
                    del text_block_plist[-2]
            if terminate_text:
                #sys.stdout.write('ReadTemplate() appending: ')
                #sys.stdout.write(text_block)
                #tmpl_list.append( [text_block,
                #                   ((prev_filename, prev_lineno),
                #                    (self.infile, self.lineno))] )
                if simplify_output:
                    tmpl_list.append(''.join(text_block_plist))
                else:
                    tmpl_list.append(TextBlock(''.join(text_block_plist),
                                               OSrcLoc(prev_filename, prev_lineno)))
                    #, OSrcLoc(self.infile, self.lineno)))
                if not done_reading:
                    # The character that ended the text block
                    # was a variable delimiter (like '$'), in which case
                    # we should put it (nextchar) in the variable's prefix.
                    var_prefix = nextchar
                else:
                    var_prefix = ''
                var_descr_plist = []
                var_suffix = ''
                prev_filename = self.infile
                prev_lineno = self.lineno
                del text_block_plist
                text_block_plist = []
                #gc.collect()
            elif terminate_var:
                # Print an error if we terminated in the middle of
                # an incomplete variable name:
                if prev_char_delim:
                    raise InputError('Error: near '+self.error_leader()+'\n\n'
                                     'Null variable name.')
                if var_paren_depth > 0:
                    raise InputError('Error: near '+self.error_leader()+'\n\n'
                                     'Incomplete bracketed variable name.')
                var_descr_str = ''.join(var_descr_plist)
                # Now check for variable format modifiers,
                # like python's ".rjust()" and ".ljust()".
                # If present, then put these in the variable suffix.
                if ((len(var_descr_plist)>0) and (var_descr_plist[-1]==')')):
                    #i = len(var_descr_plist)-1
                    #while i >= 0:
                    #    if var_descr_plist[i] == '(':
                    #        break
                    #    i -= 1
                    i = var_descr_str.rfind('(')
                    if (((i-6) >= 0) and
                        ((var_descr_str[i-6:i] == '.rjust') or
                         (var_descr_str[i-6:i] == '.ljust'))):
                        var_suffix =''.join(var_descr_plist[i-6:])+var_suffix
                        #var_descr_plist = var_descr_plist[:i-6]
                        var_descr_str = var_descr_str[:i-6]
                # Process any special characters in the variable name
                var_descr_str = EscCharStrToChar(var_descr_str)
                #tmpl_list.append( [[var_prefix, var_descr_str, var_suffix],
                #                   (self.infile, self.lineno)] )
                if simplify_output:
                    tmpl_list.append(var_prefix + var_descr_str + var_suffix)
                else:
                    tmpl_list.append( VarRef(var_prefix, var_descr_str, var_suffix,
                                             OSrcLoc(self.infile, self.lineno)) )
                #if report_progress:
                #sys.stderr.write('  parsed variable '+var_prefix+var_descr_str+var_suffix+'\n')
                #sys.stdout.write('ReadTemplate() appending: ')
                #print(var_prefix + var_descr_str + var_suffix)
                del var_descr_plist
                del var_descr_str
                prev_filename = self.infile
                prev_lineno = self.lineno
                var_prefix = ''
                var_descr_plist = []
                var_suffix = ''
                # Special case: Variable delimiters like '$'
                #               terminate the reading of variables,
                #               but they also signify that a new
                #               variable is being read.
                if nextchar in self.var_delim:
                    # Then we are processing a new variable name
                    prev_var_delim = True
                    # NOTE(review): see note above -- 'prev_var_delim' is
                    # never read; 'prev_char_delim' may have been intended.
                    reading_var = True
                    var_paren_depth = 0
                    var_prefix = nextchar
                elif nextchar in self.var_close_paren:
                    del text_block_plist
                    text_block_plist = []
                    #gc.collect()
                    prev_var_delim = False
                    reading_var = False
                else:
                    # Generally, we don't want to initialize the next text block
                    # with the empty string.  Consider that whatever character
                    # caused us to stop reading the previous variable and append
                    # it to the block of text that comes after.
                    del text_block_plist
                    text_block_plist = [nextchar]
                    #gc.collect()
                    prev_var_delim = False
                    reading_var = False
            # If we reached the end of the template (and the user requests it),
            # then the terminal character can be included in the list
            # of text_blocks to be returned to the caller.
            if done_reading and keep_terminal_char:
                #sys.stdout.write('ReadTemplate() appending: \''+nextchar+'\'\n')
                # Here we create a new text block which contains only the
                # terminal character (nextchar).
                #tmpl_list.append( [nextchar,
                #                   ((self.infile, self.lineno),
                #                    (self.infile, self.lineno))] )
                if simplify_output:
                    tmpl_list.append(nextchar)
                else:
                    tmpl_list.append(TextBlock(nextchar,
                                               OSrcLoc(self.infile, self.lineno)))
                    #, OSrcLoc(self.infile, self.lineno)))
            # Track two-character escape sequences: an unescaped '\'
            # arms escaped_state for exactly one following character.
            if escaped_state:
                escaped_state = False
            else:
                if nextchar in self.escape:
                    escaped_state = True
        #print("*** TMPL_LIST0 = ***", tmpl_list)
        return tmpl_list  # <- return value stored here
def GetParenExpr(self, prepend_str='', left_paren='(', right_paren=')'):
""" GetParenExpr() is useful for reading in strings
with nested parenthesis and spaces.
This function can read in the entire string:
.trans(0, 10.0*sin(30), 10.0*cos(30))
(Because I was too lazy to write this correctly...)
Spaces are currently stripped out of the expression.
(...unless surrounded by quotes) The string above becomes:
".trans(0,10.0*sin(30),10.0*cos(30))"
Sometimes the caller wants to prepend some text to the beginning
of the expression (which may contain parenthesis). For this
reason, an optional first argument ("prepend_str") can be
provided. By default it is empty.
"""
orig_wordterm = self.wordterminators
self.wordterminators = self.wordterminators.replace(left_paren,'').replace(right_paren,'')
token = self.get_token()
if ((token == '') or
(token == self.eof)):
return prepend_str
expr_str = prepend_str + token
#if (expr_str.find(left_paren) == -1):
# raise InputError('Error near or before '+self.error_leader()+'\n'
# 'Expected an open-paren (\"'+prepend_str+left_paren+'\") before this point.\n')
# return expr_str
paren_depth = expr_str.count(left_paren) - expr_str.count(right_paren)
while ((len(expr_str) == 0) or (paren_depth > 0)):
token = self.get_token()
if ((type(token) is not str) or
(token == '')):
raise InputError('Error near or before '+self.error_leader()+'\n'
'Invalid expression: \"'+expr_str+'\"')
expr_str += token
paren_depth = expr_str.count(left_paren) - expr_str.count(right_paren)
if (paren_depth != 0):
raise InputError('Error near or before '+self.error_leader()+'\n'
'Invalid expression: \"'+expr_str+'\"')
self.wordterminators = orig_wordterm
return expr_str
if __name__ == '__main__':
    # Ad-hoc test driver: tokenize standard input (no arguments) or the
    # file named on the command line, printing one token per line.
    if len(sys.argv) > 1:
        path = sys.argv[1]
        lexer = TtreeShlex(open(path), path)
    else:
        lexer = TtreeShlex()
    token = lexer.get_token()
    while token:
        print("Token: " + repr(token))
        token = lexer.get_token()
<|file_name|>复件 game_spider.py<|end_file_name|><|fim▁begin|>
<|fim▁hole|>
if __name__ == "__main__":
print u'开始爬取咨询信息...'
# 设置资讯的存储位置,必须以 \\ 结尾,分为绝对路径和相对路径
# c:\资讯\\ C:\咨询 目录下存放 txt,c:\资讯\图片 目录下存放图片
# 咨询\ 程序当前文件夹下的 资讯 目录存储 txt, 里面的 图片目录存放图片
news_path = u'c:\资讯\\'
# 设置视频目录,同上
video_path = u'c:\视频\\'
print u'游戏资讯的存放路径是:' + news_path
a = news_spider(path = news_path)
a.run()
print u'开始爬取视频信息...'
print u'视频信息的的存放路径是:' + video_path
a = video_spider(path = video_path)
a.run()
pass<|fim▁end|> |
from news_spider import news_spider
from video_spider import video_spider |
<|file_name|>test_name_id.py<|end_file_name|><|fim▁begin|># Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for volume name_id."""
from oslo.config import cfg
from cinder import context
from cinder import db
from cinder import test
from cinder.tests import utils as testutils
CONF = cfg.CONF
class NameIDsTestCase(test.TestCase):
    """Test cases for naming volumes with name_id."""

    def setUp(self):
        # Build one request context shared by every test in this class.
        super(NameIDsTestCase, self).setUp()
        self.ctxt = context.RequestContext(user_id='user_id',
                                           project_id='project_id')

    def tearDown(self):
        # No extra cleanup is needed beyond the base class.
        super(NameIDsTestCase, self).tearDown()

    def test_name_id_same(self):
        """New volume should have same 'id' and 'name_id'."""
        vol_ref = testutils.create_volume(self.ctxt, size=1)
        self.assertEqual(vol_ref['name_id'], vol_ref['id'])
        # The volume's display name is derived from name_id via the template.
        expected_name = CONF.volume_name_template % vol_ref['id']
        self.assertEqual(vol_ref['name'], expected_name)

    def test_name_id_diff(self):
        """Change name ID to mimic volume after migration."""
        vol_ref = testutils.create_volume(self.ctxt, size=1)
        db.volume_update(self.ctxt, vol_ref['id'], {'name_id': 'fake'})
        vol_ref = db.volume_get(self.ctxt, vol_ref['id'])
        # After migration the name must follow name_id, not the original id.
        expected_name = CONF.volume_name_template % 'fake'
        self.assertEqual(vol_ref['name'], expected_name)
def test_name_id_snapshot_volume_name(self):
"""Make sure snapshot['volume_name'] is updated."""
vol_ref = testutils.create_volume(self.ctxt, size=1)<|fim▁hole|> self.assertEqual(snap_ref['volume_name'], expected_name)<|fim▁end|> | db.volume_update(self.ctxt, vol_ref['id'], {'name_id': 'fake'})
snap_ref = testutils.create_snapshot(self.ctxt, vol_ref['id'])
expected_name = CONF.volume_name_template % 'fake' |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var expect = require("chai").expect;
var MoodleExporter = require('../../MoodleExporter');
var xml2js = require('xml2js');
describe('MoodleExporter', function () {
var validSeed = 'abcd1234';
var validQD = {
"version": "0.1",
"questions": [{
"question": "mc-change-of-base",
"repeat": 2,
}]
};
describe('generateMoodleXML(qd, seed)', function () {
describe('throwing errors', function () {
describe('when qd is invalid problemType', function () {
it('should throw an error.', function () {
expect(function () {
MoodleExporter.generateMoodleXML({ "invalid": "qd" }, validSeed)
}).to.throw(Error);
});
});
describe('invalid seed', function () {
it("should throw an error", function () {
expect(function () {
MoodleExporter.generateMoodleXML(validQD, "invalid-seed");
}).to.throw(Error);
});
});
});
describe('successful conversion', function () {
var xmlResult, xmlString;
describe('general requirements', function () {
beforeEach(function (done) {
try {
xmlString = MoodleExporter.generateMoodleXML(validQD, validSeed);
} catch(e) {
console.log(e);
throw e;
}
xml2js.parseString(xmlString, function (err, result) {<|fim▁hole|> });
describe('xml quiz tag', function () {
it('should set the quiz tag', function () {
expect(xmlResult.quiz).to.exist;
});
});
describe('xml question', function () {
it('should have the # of questions specified by the count parameter', function () {
expect(xmlResult.quiz.question.length).to.equal(2);
});
});
});
describe('multiple problemTypes', function () {
beforeEach(function (done) {
var qd = {
"version": "0.1",
"questions": [{
"question": "mc-change-of-base",
"repeat": 1,
},
{
"question": "fr-change-of-base",
"repeat": 1,
}]
};
xmlString = MoodleExporter.generateMoodleXML(qd, validSeed);
xml2js.parseString(xmlString, function (err, result) {
xmlResult = result;
done();
});
});
describe('first question title', function () {
it('should be Change of Base Multiple Choice', function () {
expect(xmlResult.quiz.question[0].name[0].text[0]).to.equal("Change of Base Multiple Choice");
});
});
describe('second question title', function () {
it('should be Change of Base Free Response', function () {
expect(xmlResult.quiz.question[1].name[0].text[0]).to.equal("Change of Base Free Response");
});
});
});
describe('different formats', function () {
describe('multiple choice format', function () {
beforeEach(function (done) {
problemTypeRequested = 'mc-change-of-base';
count = 2;
questionName = 'Sample Question Name';
var qd = {
"version": "0.1",
"questions": [{
"question": problemTypeRequested,
"repeat": count,
}]
};
xmlString = MoodleExporter.generateMoodleXML(qd, validSeed);
xml2js.parseString(xmlString, function (err, result) {
xmlResult = result;
done();
});
});
//TODO: fix output type references -- previously may have been question type MC vs free response
describe('xml output type', function () {
it('should have set the output type attribute to multichoice', function () {
for (var i = 0; xmlResult.quiz.question.length > i; i++)
expect(xmlResult.quiz.question[i].$.type).to.equal('multichoice');
});
});
describe('xml question title', function () {
it('should be Change of Base Multiple Choice', function () {
expect(xmlResult.quiz.question[0].name[0].text[0]).to.equal("Change of Base Multiple Choice");
});
});
});
describe('input format', function () {
beforeEach(function (done) {
problemTypeRequested = 'fr-change-of-base';
count = 2;
questionName = 'Sample Question Name';
var qd = {
"version": "0.1",
"questions": [{
"question": problemTypeRequested,
"repeat": count,
}]
};
xmlString = MoodleExporter.generateMoodleXML(qd, validSeed);
xml2js.parseString(xmlString, function (err, result) {
xmlResult = result;
done();
});
});
//TODO: fix output type references -- previously may have been question type MC vs free response
//TODO: shortanswer vs free-response
describe('xml output type property', function () {
it('should have set the output type attribute to shortanswer', function () {
for (var i = 0; xmlResult.quiz.question.length > i; i++)
expect(xmlResult.quiz.question[i].$.type).to.equal('shortanswer');
});
});
describe('xml question title', function () {
it('should be Change of Base Free Response', function () {
expect(xmlResult.quiz.question[0].name[0].text[0]).to.equal("Change of Base Free Response");
});
});
describe('question.answer.text', function () {
it('should exist', function () {
expect(xmlResult.quiz.question[0].answer[0].text).to.exist;
});
});
});
});
});
});
});<|fim▁end|> | xmlResult = result;
done();
}); |
<|file_name|>fmt.go<|end_file_name|><|fim▁begin|><|fim▁hole|>
import (
"bytes"
"github.com/influxdata/kapacitor/tick/ast"
)
// Formats a TICKscript according to the standard.
func Format(script string) (string, error) {
root, err := ast.Parse(script)
if err != nil {
return "", err
}
var buf bytes.Buffer
buf.Grow(len(script))
root.Format(&buf, "", false)
return buf.String(), nil
}<|fim▁end|> | package tick |
<|file_name|>ShapeGraphicAttribute.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Ilya S. Okomin
* @version $Revision$
*/
package java.awt.font;
import java.awt.BasicStroke;
import java.awt.Graphics2D;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.geom.AffineTransform;
import java.awt.geom.Rectangle2D;
import org.apache.harmony.misc.HashCode;
public final class ShapeGraphicAttribute extends GraphicAttribute {
// shape to render
private Shape fShape;
// flag, if the shape should be stroked (true) or filled (false)
private boolean fStroke;
// bounds of the shape
private Rectangle2D fBounds;
// X coordinate of the origin point
private float fOriginX;
// Y coordinate of the origin point
private float fOriginY;
// width of the shape
private float fShapeWidth;
// height of the shape
private float fShapeHeight;
public static final boolean STROKE = true;
public static final boolean FILL = false;
public ShapeGraphicAttribute(Shape shape, int alignment, boolean stroke) {
super(alignment);
this.fShape = shape;
this.fStroke = stroke;
this.fBounds = fShape.getBounds2D();
this.fOriginX = (float)fBounds.getMinX();
this.fOriginY = (float)fBounds.getMinY();
this.fShapeWidth = (float)fBounds.getWidth();
this.fShapeHeight = (float)fBounds.getHeight();
}
@Override
public int hashCode() {
HashCode hash = new HashCode();
hash.append(fShape.hashCode());
hash.append(getAlignment());
return hash.hashCode();
}
public boolean equals(ShapeGraphicAttribute sga) {
if (sga == null) {
return false;
}
if (sga == this) {
return true;
}
return ( fStroke == sga.fStroke &&
getAlignment() == sga.getAlignment() &&
fShape.equals(sga.fShape));
}
@Override
public boolean equals(Object obj) {
try {
return equals((ShapeGraphicAttribute) obj);
}
catch(ClassCastException e) {
return false;
}
}
@Override
public void draw(Graphics2D g2, float x, float y) {
AffineTransform at = AffineTransform.getTranslateInstance(x, y);
if (fStroke == STROKE){
Stroke oldStroke = g2.getStroke();
g2.setStroke(new BasicStroke());
g2.draw(at.createTransformedShape(fShape));<|fim▁hole|> } else {
g2.fill(at.createTransformedShape(fShape));
}
}
@Override
public float getAdvance() {
return Math.max(0, fShapeWidth + fOriginX);
}
@Override
public float getAscent() {
return Math.max(0, -fOriginY);
}
@Override
public Rectangle2D getBounds() {
return (Rectangle2D)fBounds.clone();
}
@Override
public float getDescent() {
return Math.max(0, fShapeHeight + fOriginY);
}
}<|fim▁end|> | g2.setStroke(oldStroke); |
<|file_name|>ledstart.py<|end_file_name|><|fim▁begin|>import RPi.GPIO as GPIO
import time
# Blink an LED on board pin 11: three slow flashes (0.5 s) followed by
# three fast flashes (0.2 s).  The original unrolled sequence was missing
# the on-time sleep on the third flash (the LED was switched off again
# almost immediately); loops make the timing uniform and obvious.
led = 11

GPIO.setmode(GPIO.BOARD)
GPIO.setup(led, GPIO.OUT)

try:
    # Slow flashes: 0.5 s on, 0.5 s off.
    for _ in range(3):
        GPIO.output(led, True)
        time.sleep(0.5)
        GPIO.output(led, False)
        time.sleep(0.5)
    # Fast flashes: 0.2 s on, 0.2 s off.
    for _ in range(3):
        GPIO.output(led, True)
        time.sleep(0.2)
        GPIO.output(led, False)
        time.sleep(0.2)
finally:
    # Release the pin even if interrupted (e.g. Ctrl-C mid-blink); the
    # original left the pin configured (cleanup was commented out).
    GPIO.cleanup()
<|file_name|>test_unicode.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from itertools import product
from itertools import permutations
from numba import njit
from numba.core import types, utils
import unittest
from numba.tests.support import (TestCase, no_pyobj_flags, MemoryLeakMixin)
from numba.core.errors import TypingError, UnsupportedError
from numba.cpython.unicode import _MAX_UNICODE
from numba.core.types.functions import _header_lead
from numba.extending import overload
_py37_or_later = utils.PYVERSION >= (3, 7)
def isascii(s):
    """Return True when every character of *s* is a 7-bit ASCII code point."""
    for ch in s:
        if ord(ch) >= 128:
            return False
    return True
# --- njit use-case functions ------------------------------------------------
# Each tiny function below is compiled with @njit by the tests and exists to
# exercise exactly one unicode operation in nopython mode; keep the bodies
# minimal and literal.
def literal_usecase():
    # Non-ASCII unicode literal (CJK text, fullwidth punctuation).
    return '大处着眼,小处着手。'
def passthrough_usecase(x):
    # Unbox/box round trip of a unicode argument.
    return x
def eq_usecase(x, y):
    # Equality operator on strings.
    return x == y
def len_usecase(x):
    # Built-in len() on a string.
    return len(x)
def bool_usecase(x):
    # Truthiness of a string (empty -> False).
    return bool(x)
def getitem_usecase(x, i):
    # Indexing/slicing via __getitem__.
    return x[i]
def getitem_check_kind_usecase(x, i):
    # Indexing followed by hash() — name suggests it validates the kind
    # of the resulting string; TODO confirm against the calling tests.
    return hash(x[i])
def zfill_usecase(x, y):
    # str.zfill.
    return x.zfill(y)
def concat_usecase(x, y):
    # String concatenation with +.
    return x + y
def repeat_usecase(x, y):
    # String repetition with *.
    return x * y
def inplace_concat_usecase(x, y):
    # In-place concatenation with +=.
    x += y
    return x
def in_usecase(x, y):
    # Membership test with `in`.
    return x in y
def lt_usecase(x, y):
return x < y
def le_usecase(x, y):
return x <= y
def gt_usecase(x, y):
return x > y
def ge_usecase(x, y):
return x >= y
def partition_usecase(s, sep):
return s.partition(sep)
def find_usecase(x, y):
return x.find(y)
def find_with_start_only_usecase(x, y, start):
return x.find(y, start)
def find_with_start_end_usecase(x, y, start, end):
return x.find(y, start, end)
def rpartition_usecase(s, sep):
return s.rpartition(sep)
def count_usecase(x, y):
return x.count(y)
def count_with_start_usecase(x, y, start):
return x.count(y, start)
def count_with_start_end_usecase(x, y, start, end):
return x.count(y, start, end)
def rfind_usecase(x, y):
return x.rfind(y)
def rfind_with_start_only_usecase(x, y, start):
return x.rfind(y, start)
def rfind_with_start_end_usecase(x, y, start, end):
return x.rfind(y, start, end)
def replace_usecase(s, x, y):
return s.replace(x, y)
def replace_with_count_usecase(s, x, y, count):
return s.replace(x, y, count)
def rindex_usecase(x, y):
return x.rindex(y)
def rindex_with_start_only_usecase(x, y, start):
return x.rindex(y, start)
def rindex_with_start_end_usecase(x, y, start, end):
return x.rindex(y, start, end)
def index_usecase(x, y):
return x.index(y)
def index_with_start_only_usecase(x, y, start):
return x.index(y, start)
def index_with_start_end_usecase(x, y, start, end):
return x.index(y, start, end)
def startswith_usecase(x, y):
return x.startswith(y)
def endswith_usecase(x, y):
return x.endswith(y)
def expandtabs_usecase(s):
return s.expandtabs()
def expandtabs_with_tabsize_usecase(s, tabsize):
return s.expandtabs(tabsize)
def expandtabs_with_tabsize_kwarg_usecase(s, tabsize):
return s.expandtabs(tabsize=tabsize)
def endswith_with_start_only_usecase(x, y, start):
return x.endswith(y, start)
def endswith_with_start_end_usecase(x, y, start, end):
return x.endswith(y, start, end)
def split_usecase(x, y):
return x.split(y)
def split_with_maxsplit_usecase(x, y, maxsplit):
return x.split(y, maxsplit)
def split_with_maxsplit_kwarg_usecase(x, y, maxsplit):
return x.split(y, maxsplit=maxsplit)
def split_whitespace_usecase(x):
return x.split()
def splitlines_usecase(s):
return s.splitlines()
def splitlines_with_keepends_usecase(s, keepends):
return s.splitlines(keepends)
def splitlines_with_keepends_kwarg_usecase(s, keepends):
return s.splitlines(keepends=keepends)
<|fim▁hole|>def rsplit_usecase(s, sep):
return s.rsplit(sep)
def rsplit_with_maxsplit_usecase(s, sep, maxsplit):
return s.rsplit(sep, maxsplit)
def rsplit_with_maxsplit_kwarg_usecase(s, sep, maxsplit):
return s.rsplit(sep, maxsplit=maxsplit)
def rsplit_whitespace_usecase(s):
return s.rsplit()
def lstrip_usecase(x):
return x.lstrip()
def lstrip_usecase_chars(x, chars):
return x.lstrip(chars)
def rstrip_usecase(x):
return x.rstrip()
def rstrip_usecase_chars(x, chars):
return x.rstrip(chars)
def strip_usecase(x):
return x.strip()
def strip_usecase_chars(x, chars):
return x.strip(chars)
def join_usecase(x, y):
return x.join(y)
def join_empty_usecase(x):
# hack to make empty typed list
l = ['']
l.pop()
return x.join(l)
def center_usecase(x, y):
return x.center(y)
def center_usecase_fillchar(x, y, fillchar):
return x.center(y, fillchar)
def ljust_usecase(x, y):
return x.ljust(y)
def ljust_usecase_fillchar(x, y, fillchar):
return x.ljust(y, fillchar)
def rjust_usecase(x, y):
return x.rjust(y)
def rjust_usecase_fillchar(x, y, fillchar):
return x.rjust(y, fillchar)
def istitle_usecase(x):
return x.istitle()
def iter_usecase(x):
    # Iterate a string argument, collecting each character.
    l = []
    for i in x:
        l.append(i)
    return l
def title(x):
    # str.title.
    return x.title()
def literal_iter_usecase():
    # Iterate over a constant unicode literal.
    l = []
    for i in '大处着眼,小处着手。':
        l.append(i)
    return l
def enumerated_iter_usecase(x):
    # enumerate() over a string: accumulate characters and count steps.
    buf = ""
    scan = 0
    for i, s in enumerate(x):
        buf += s
        scan += 1
    return buf, scan
def iter_stopiteration_usecase(x):
    # Call next() one time too many to trigger StopIteration.
    n = len(x)
    i = iter(x)
    for _ in range(n + 1):
        next(i)
def literal_iter_stopiteration_usecase():
    # Same as above, but on the iterator of a literal string.
    s = '大处着眼,小处着手。'
    i = iter(s)
    n = len(s)
    for _ in range(n + 1):
        next(i)
def islower_usecase(x):
    # str.islower.
    return x.islower()
def lower_usecase(x):
    # str.lower.
    return x.lower()
def ord_usecase(x):
    # Built-in ord() on a single-character string.
    return ord(x)
def chr_usecase(x):
    # Built-in chr() from an integer code point.
    return chr(x)
class BaseTest(MemoryLeakMixin, TestCase):
    # Shared base for the unicode tests; MemoryLeakMixin checks that NRT
    # allocations are balanced after each test.
    def setUp(self):
        super(BaseTest, self).setUp()
# Representative inputs: empty, pure ASCII, digits, Latin-1 accents,
# astral-plane emoji and CJK text — spanning the different unicode
# storage widths.
UNICODE_EXAMPLES = [
    '',
    'ascii',
    '12345',
    '1234567890',
    '¡Y tú quién te crees?',
    '🐍⚡',
    '大处着眼,小处着手。',
]
# Strings used (pairwise) for the ordering-operator tests.
# Fixed: a missing comma after 'a' made Python concatenate 'a' 'aa' into a
# duplicate 'aaa' literal, silently dropping 'a' from the test set.
UNICODE_ORDERING_EXAMPLES = [
    '',
    'a',
    'aa',
    'aaa',
    'b',
    'aab',
    'ab',
    'asc',
    'ascih',
    'ascii',
    'ascij',
    '大处着眼,小处着手',
    '大处着眼,小处着手。',
    '大处着眼,小处着手。🐍⚡',
]
UNICODE_COUNT_EXAMPLES = [
('', ''),
('', 'ascii'),
('ascii', ''),
('asc ii', ' '),
('ascii', 'ci'),
('ascii', 'ascii'),
('ascii', 'Ă'),
('ascii', '大处'),
('ascii', 'étú?'),
('', '大处 着眼,小处着手。大大大处'),
('大处 着眼,小处着手。大大大处', ''),
('大处 着眼,小处着手。大大大处', ' '),
('大处 着眼,小处着手。大大大处', 'ci'),
('大处 着眼,小处着手。大大大处', '大处大处'),
('大处 着眼,小处着手。大大大处', '大处 着眼,小处着手。大大大处'),
('大处 着眼,小处着手。大大大处', 'Ă'),
('大处 着眼,小处着手。大大大处', '大处'),
('大处 着眼,小处着手。大大大处', 'étú?'),
('', 'tú quién te crees?'),
('tú quién te crees?', ''),
('tú quién te crees?', ' '),
('tú quién te crees?', 'ci'),
('tú quién te crees?', 'tú quién te crees?'),
('tú quién te crees?', 'Ă'),
('tú quién te crees?', '大处'),
('tú quién te crees?', 'étú?'),
('abababab', 'a'),
('abababab', 'ab'),
('abababab', 'aba'),
('aaaaaaaaaa', 'aaa'),
('aaaaaaaaaa', 'aĂ'),
('aabbaaaabbaa', 'aa')
]
class TestUnicode(BaseTest):
def test_literal(self, flags=no_pyobj_flags):
pyfunc = literal_usecase
self.run_nullary_func(pyfunc, flags=flags)
def test_passthrough(self, flags=no_pyobj_flags):
pyfunc = passthrough_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
self.assertEqual(pyfunc(s), cfunc(s))
def test_eq(self, flags=no_pyobj_flags):
pyfunc = eq_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
for b in reversed(UNICODE_EXAMPLES):
self.assertEqual(pyfunc(a, b),
cfunc(a, b), '%s, %s' % (a, b))
# comparing against something that's not unicode
self.assertEqual(pyfunc(a, 1),
cfunc(a, 1), '%s, %s' % (a, 1))
self.assertEqual(pyfunc(1, b),
cfunc(1, b), '%s, %s' % (1, b))
def _check_ordering_op(self, usecase):
pyfunc = usecase
cfunc = njit(pyfunc)
# Check comparison to self
for a in UNICODE_ORDERING_EXAMPLES:
self.assertEqual(
pyfunc(a, a),
cfunc(a, a),
'%s: "%s", "%s"' % (usecase.__name__, a, a),
)
# Check comparison to adjacent
for a, b in permutations(UNICODE_ORDERING_EXAMPLES, r=2):
self.assertEqual(
pyfunc(a, b),
cfunc(a, b),
'%s: "%s", "%s"' % (usecase.__name__, a, b),
)
# and reversed
self.assertEqual(
pyfunc(b, a),
cfunc(b, a),
'%s: "%s", "%s"' % (usecase.__name__, b, a),
)
def test_lt(self, flags=no_pyobj_flags):
self._check_ordering_op(lt_usecase)
def test_le(self, flags=no_pyobj_flags):
self._check_ordering_op(le_usecase)
def test_gt(self, flags=no_pyobj_flags):
self._check_ordering_op(gt_usecase)
def test_ge(self, flags=no_pyobj_flags):
self._check_ordering_op(ge_usecase)
def test_len(self, flags=no_pyobj_flags):
pyfunc = len_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
self.assertEqual(pyfunc(s), cfunc(s))
def test_bool(self, flags=no_pyobj_flags):
pyfunc = bool_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
self.assertEqual(pyfunc(s), cfunc(s))
def test_expandtabs(self):
pyfunc = expandtabs_usecase
cfunc = njit(pyfunc)
cases = ['', '\t', 't\tt\t', 'a\t', '\t⚡', 'a\tbc\nab\tc',
'🐍\t⚡', '🐍⚡\n\t\t🐍\t', 'ab\rab\t\t\tab\r\n\ta']
msg = 'Results of "{}".expandtabs() must be equal'
for s in cases:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_expandtabs_with_tabsize(self):
fns = [njit(expandtabs_with_tabsize_usecase),
njit(expandtabs_with_tabsize_kwarg_usecase)]
messages = ['Results of "{}".expandtabs({}) must be equal',
'Results of "{}".expandtabs(tabsize={}) must be equal']
cases = ['', '\t', 't\tt\t', 'a\t', '\t⚡', 'a\tbc\nab\tc',
'🐍\t⚡', '🐍⚡\n\t\t🐍\t', 'ab\rab\t\t\tab\r\n\ta']
for s in cases:
for tabsize in range(-1, 10):
for fn, msg in zip(fns, messages):
self.assertEqual(fn.py_func(s, tabsize), fn(s, tabsize),
msg=msg.format(s, tabsize))
def test_expandtabs_exception_noninteger_tabsize(self):
pyfunc = expandtabs_with_tabsize_usecase
cfunc = njit(pyfunc)
accepted_types = (types.Integer, int)
with self.assertRaises(TypingError) as raises:
cfunc('\t', 2.4)
msg = '"tabsize" must be {}, not float'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
def test_startswith(self, flags=no_pyobj_flags):
pyfunc = startswith_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
for b in ['', 'x', a[:-2], a[3:], a, a + a]:
self.assertEqual(pyfunc(a, b),
cfunc(a, b),
'%s, %s' % (a, b))
def test_endswith(self, flags=no_pyobj_flags):
pyfunc = endswith_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
for b in ['', 'x', a[:-2], a[3:], a, a + a]:
self.assertEqual(pyfunc(a, b),
cfunc(a, b),
'%s, %s' % (a, b))
def test_endswith_default(self):
pyfunc = endswith_usecase
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#L1049-L1099 # noqa: E501
cpython_str = ['hello', 'helloworld', '']
cpython_subs = [
'he', 'hello', 'helloworld', 'ello',
'', 'lowo', 'lo', 'he', 'lo', 'o',
]
extra_subs = ['hellohellohello', ' ']
for s in cpython_str + UNICODE_EXAMPLES:
default_subs = ['', 'x', s[:-2], s[3:], s, s + s]
for sub_str in cpython_subs + default_subs + extra_subs:
msg = 'Results "{}".endswith("{}") must be equal'
self.assertEqual(pyfunc(s, sub_str), cfunc(s, sub_str),
msg=msg.format(s, sub_str))
def test_endswith_with_start(self):
pyfunc = endswith_with_start_only_usecase
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#L1049-L1099 # noqa: E501
cpython_str = ['hello', 'helloworld', '']
cpython_subs = [
'he', 'hello', 'helloworld', 'ello',
'', 'lowo', 'lo', 'he', 'lo', 'o',
]
extra_subs = ['hellohellohello', ' ']
for s in cpython_str + UNICODE_EXAMPLES:
default_subs = ['', 'x', s[:-2], s[3:], s, s + s]
for sub_str in cpython_subs + default_subs + extra_subs:
for start in list(range(-20, 20)) + [None]:
msg = 'Results "{}".endswith("{}", {}) must be equal'
self.assertEqual(pyfunc(s, sub_str, start),
cfunc(s, sub_str, start),
msg=msg.format(s, sub_str, start))
def test_endswith_with_start_end(self):
pyfunc = endswith_with_start_end_usecase
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#LL1049-L1099 # noqa: E501
cpython_str = ['hello', 'helloworld', '']
cpython_subs = [
'he', 'hello', 'helloworld', 'ello',
'', 'lowo', 'lo', 'he', 'lo', 'o',
]
extra_subs = ['hellohellohello', ' ']
for s in cpython_str + UNICODE_EXAMPLES:
default_subs = ['', 'x', s[:-2], s[3:], s, s + s]
for sub_str in cpython_subs + default_subs + extra_subs:
for start in list(range(-20, 20)) + [None]:
for end in list(range(-20, 20)) + [None]:
msg = 'Results "{}".endswith("{}", {}, {})\
must be equal'
self.assertEqual(pyfunc(s, sub_str, start, end),
cfunc(s, sub_str, start, end),
msg=msg.format(s, sub_str, start, end))
def test_endswith_tuple(self):
pyfunc = endswith_usecase
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#L1049-L1099 # noqa: E501
cpython_str = ['hello', 'helloworld', '']
cpython_subs = [
'he', 'hello', 'helloworld', 'ello',
'', 'lowo', 'lo', 'he', 'lo', 'o',
]
extra_subs = ['hellohellohello', ' ']
for s in cpython_str + UNICODE_EXAMPLES:
default_subs = ['', 'x', s[:-2], s[3:], s, s + s]
for sub_str in cpython_subs + default_subs + extra_subs:
msg = 'Results "{}".endswith({}) must be equal'
tuple_subs = (sub_str, 'lo')
self.assertEqual(pyfunc(s, tuple_subs),
cfunc(s, tuple_subs),
msg=msg.format(s, tuple_subs))
def test_endswith_tuple_args(self):
pyfunc = endswith_with_start_end_usecase
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#L1049-L1099 # noqa: E501
cpython_str = ['hello', 'helloworld', '']
cpython_subs = [
'he', 'hello', 'helloworld', 'ello',
'', 'lowo', 'lo', 'he', 'lo', 'o',
]
extra_subs = ['hellohellohello', ' ']
for s in cpython_str + UNICODE_EXAMPLES:
default_subs = ['', 'x', s[:-2], s[3:], s, s + s]
for sub_str in cpython_subs + default_subs + extra_subs:
for start in list(range(-20, 20)) + [None]:
for end in list(range(-20, 20)) + [None]:
msg = 'Results "{}".endswith("{}", {}, {})\
must be equal'
tuple_subs = (sub_str, 'lo')
self.assertEqual(pyfunc(s, tuple_subs, start, end),
cfunc(s, tuple_subs, start, end),
msg=msg.format(s, tuple_subs,
start, end))
def test_in(self, flags=no_pyobj_flags):
pyfunc = in_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
extras = ['', 'xx', a[::-1], a[:-2], a[3:], a, a + a]
for substr in extras:
self.assertEqual(pyfunc(substr, a),
cfunc(substr, a),
"'%s' in '%s'?" % (substr, a))
def test_partition_exception_invalid_sep(self):
self.disable_leak_check()
pyfunc = partition_usecase
cfunc = njit(pyfunc)
# Handle empty separator exception
for func in [pyfunc, cfunc]:
with self.assertRaises(ValueError) as raises:
func('a', '')
self.assertIn('empty separator', str(raises.exception))
accepted_types = (types.UnicodeType, types.UnicodeCharSeq)
with self.assertRaises(TypingError) as raises:
cfunc('a', None)
msg = '"sep" must be {}, not none'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
def test_partition(self):
pyfunc = partition_usecase
cfunc = njit(pyfunc)
CASES = [
('', '⚡'),
('abcabc', '⚡'),
('🐍⚡', '⚡'),
('🐍⚡🐍', '⚡'),
('abababa', 'a'),
('abababa', 'b'),
('abababa', 'c'),
('abababa', 'ab'),
('abababa', 'aba'),
]
msg = 'Results of "{}".partition("{}") must be equal'
for s, sep in CASES:
self.assertEqual(pyfunc(s, sep), cfunc(s, sep),
msg=msg.format(s, sep))
def test_find(self, flags=no_pyobj_flags):
pyfunc = find_usecase
cfunc = njit(pyfunc)
default_subs = [
(s, ['', 'xx', s[:-2], s[3:], s]) for s in UNICODE_EXAMPLES
]
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L202-L231 # noqa: E501
cpython_subs = [
('a' * 100 + '\u0102', ['\u0102', '\u0201', '\u0120', '\u0220']),
('a' * 100 + '\U00100304', ['\U00100304', '\U00100204',
'\U00102004']),
('\u0102' * 100 + 'a', ['a']),
('\U00100304' * 100 + 'a', ['a']),
('\U00100304' * 100 + '\u0102', ['\u0102']),
('a' * 100, ['\u0102', '\U00100304', 'a\u0102', 'a\U00100304']),
('\u0102' * 100, ['\U00100304', '\u0102\U00100304']),
('\u0102' * 100 + 'a_', ['a_']),
('\U00100304' * 100 + 'a_', ['a_']),
('\U00100304' * 100 + '\u0102_', ['\u0102_']),
]
for s, subs in default_subs + cpython_subs:
for sub_str in subs:
msg = 'Results "{}".find("{}") must be equal'
self.assertEqual(pyfunc(s, sub_str), cfunc(s, sub_str),
msg=msg.format(s, sub_str))
def test_find_with_start_only(self):
pyfunc = find_with_start_only_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
for sub_str in ['', 'xx', s[:-2], s[3:], s]:
for start in list(range(-20, 20)) + [None]:
msg = 'Results "{}".find("{}", {}) must be equal'
self.assertEqual(pyfunc(s, sub_str, start),
cfunc(s, sub_str, start),
msg=msg.format(s, sub_str, start))
def test_find_with_start_end(self):
pyfunc = find_with_start_end_usecase
cfunc = njit(pyfunc)
starts = ends = list(range(-20, 20)) + [None]
for s in UNICODE_EXAMPLES:
for sub_str in ['', 'xx', s[:-2], s[3:], s]:
for start, end in product(starts, ends):
msg = 'Results of "{}".find("{}", {}, {}) must be equal'
self.assertEqual(pyfunc(s, sub_str, start, end),
cfunc(s, sub_str, start, end),
msg=msg.format(s, sub_str, start, end))
def test_find_exception_noninteger_start_end(self):
pyfunc = find_with_start_end_usecase
cfunc = njit(pyfunc)
accepted = (types.Integer, types.NoneType)
for start, end, name in [(0.1, 5, 'start'), (0, 0.5, 'end')]:
with self.assertRaises(TypingError) as raises:
cfunc('ascii', 'sci', start, end)
msg = '"{}" must be {}, not float'.format(name, accepted)
self.assertIn(msg, str(raises.exception))
def test_rpartition_exception_invalid_sep(self):
self.disable_leak_check()
pyfunc = rpartition_usecase
cfunc = njit(pyfunc)
# Handle empty separator exception
for func in [pyfunc, cfunc]:
with self.assertRaises(ValueError) as raises:
func('a', '')
self.assertIn('empty separator', str(raises.exception))
accepted_types = (types.UnicodeType, types.UnicodeCharSeq)
with self.assertRaises(TypingError) as raises:
cfunc('a', None)
msg = '"sep" must be {}, not none'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
def test_rpartition(self):
pyfunc = rpartition_usecase
cfunc = njit(pyfunc)
CASES = [
('', '⚡'),
('abcabc', '⚡'),
('🐍⚡', '⚡'),
('🐍⚡🐍', '⚡'),
('abababa', 'a'),
('abababa', 'b'),
('abababa', 'c'),
('abababa', 'ab'),
('abababa', 'aba'),
]
msg = 'Results of "{}".rpartition("{}") must be equal'
for s, sep in CASES:
self.assertEqual(pyfunc(s, sep), cfunc(s, sep),
msg=msg.format(s, sep))
def test_count(self):
pyfunc = count_usecase
cfunc = njit(pyfunc)
error_msg = "'{0}'.py_count('{1}') = {2}\n'{0}'.c_count('{1}') = {3}"
for s, sub in UNICODE_COUNT_EXAMPLES:
py_result = pyfunc(s, sub)
c_result = cfunc(s, sub)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, py_result, c_result))
def test_count_with_start(self):
pyfunc = count_with_start_usecase
cfunc = njit(pyfunc)
error_msg = "%s\n%s" % ("'{0}'.py_count('{1}', {2}) = {3}",
"'{0}'.c_count('{1}', {2}) = {4}")
for s, sub in UNICODE_COUNT_EXAMPLES:
for i in range(-18, 18):
py_result = pyfunc(s, sub, i)
c_result = cfunc(s, sub, i)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, i, py_result,
c_result))
py_result = pyfunc(s, sub, None)
c_result = cfunc(s, sub, None)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, None, py_result,
c_result))
def test_count_with_start_end(self):
pyfunc = count_with_start_end_usecase
cfunc = njit(pyfunc)
error_msg = "%s\n%s" % ("'{0}'.py_count('{1}', {2}, {3}) = {4}",
"'{0}'.c_count('{1}', {2}, {3}) = {5}")
for s, sub in UNICODE_COUNT_EXAMPLES:
for i, j in product(range(-18, 18), (-18, 18)):
py_result = pyfunc(s, sub, i, j)
c_result = cfunc(s, sub, i, j)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, i, j, py_result,
c_result))
for j in range(-18, 18):
py_result = pyfunc(s, sub, None, j)
c_result = cfunc(s, sub, None, j)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, None, j, py_result,
c_result))
py_result = pyfunc(s, sub, None, None)
c_result = cfunc(s, sub, None, None)
self.assertEqual(py_result, c_result,
error_msg.format(s, sub, None, None, py_result,
c_result))
def test_count_arg_type_check(self):
cfunc = njit(count_with_start_end_usecase)
with self.assertRaises(TypingError) as raises:
cfunc('ascii', 'c', 1, 0.5)
self.assertIn('The slice indices must be an Integer or None',
str(raises.exception))
with self.assertRaises(TypingError) as raises:
cfunc('ascii', 'c', 1.2, 7)
self.assertIn('The slice indices must be an Integer or None',
str(raises.exception))
with self.assertRaises(TypingError) as raises:
cfunc('ascii', 12, 1, 7)
self.assertIn('The substring must be a UnicodeType, not',
str(raises.exception))
def test_count_optional_arg_type_check(self):
pyfunc = count_with_start_end_usecase
def try_compile_bad_optional(*args):
bad_sig = types.int64(types.unicode_type,
types.unicode_type,
types.Optional(types.float64),
types.Optional(types.float64))
njit([bad_sig])(pyfunc)
with self.assertRaises(TypingError) as raises:
try_compile_bad_optional('tú quis?', 'tú', 1.1, 1.1)
self.assertIn('The slice indices must be an Integer or None',
str(raises.exception))
error_msg = "%s\n%s" % ("'{0}'.py_count('{1}', {2}, {3}) = {4}",
"'{0}'.c_count_op('{1}', {2}, {3}) = {5}")
sig_optional = types.int64(types.unicode_type,
types.unicode_type,
types.Optional(types.int64),
types.Optional(types.int64))
cfunc_optional = njit([sig_optional])(pyfunc)
py_result = pyfunc('tú quis?', 'tú', 0, 8)
c_result = cfunc_optional('tú quis?', 'tú', 0, 8)
self.assertEqual(py_result, c_result,
error_msg.format('tú quis?', 'tú', 0, 8, py_result,
c_result))
def test_rfind(self):
pyfunc = rfind_usecase
cfunc = njit(pyfunc)
default_subs = [
(s, ['', 'xx', s[:-2], s[3:], s]) for s in UNICODE_EXAMPLES
]
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L233-L259 # noqa: E501
cpython_subs = [
('\u0102' + 'a' * 100, ['\u0102', '\u0201', '\u0120', '\u0220']),
('\U00100304' + 'a' * 100, ['\U00100304', '\U00100204',
'\U00102004']),
('abcdefghiabc', ['abc', '']),
('a' + '\u0102' * 100, ['a']),
('a' + '\U00100304' * 100, ['a']),
('\u0102' + '\U00100304' * 100, ['\u0102']),
('a' * 100, ['\u0102', '\U00100304', '\u0102a', '\U00100304a']),
('\u0102' * 100, ['\U00100304', '\U00100304\u0102']),
('_a' + '\u0102' * 100, ['_a']),
('_a' + '\U00100304' * 100, ['_a']),
('_\u0102' + '\U00100304' * 100, ['_\u0102']),
]
for s, subs in default_subs + cpython_subs:
for sub_str in subs:
msg = 'Results "{}".rfind("{}") must be equal'
self.assertEqual(pyfunc(s, sub_str), cfunc(s, sub_str),
msg=msg.format(s, sub_str))
def test_rfind_with_start_only(self):
pyfunc = rfind_with_start_only_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
for sub_str in ['', 'xx', s[:-2], s[3:], s]:
for start in list(range(-20, 20)) + [None]:
msg = 'Results "{}".rfind("{}", {}) must be equal'
self.assertEqual(pyfunc(s, sub_str, start),
cfunc(s, sub_str, start),
msg=msg.format(s, sub_str, start))
def test_rfind_with_start_end(self):
pyfunc = rfind_with_start_end_usecase
cfunc = njit(pyfunc)
starts = list(range(-20, 20)) + [None]
ends = list(range(-20, 20)) + [None]
for s in UNICODE_EXAMPLES:
for sub_str in ['', 'xx', s[:-2], s[3:], s]:
for start, end in product(starts, ends):
msg = 'Results of "{}".rfind("{}", {}, {}) must be equal'
self.assertEqual(pyfunc(s, sub_str, start, end),
cfunc(s, sub_str, start, end),
msg=msg.format(s, sub_str, start, end))
def test_rfind_wrong_substr(self):
cfunc = njit(rfind_usecase)
for s in UNICODE_EXAMPLES:
for sub_str in [None, 1, False]:
with self.assertRaises(TypingError) as raises:
cfunc(s, sub_str)
msg = 'must be {}'.format(types.UnicodeType)
self.assertIn(msg, str(raises.exception))
def test_rfind_wrong_start_end(self):
cfunc = njit(rfind_with_start_end_usecase)
accepted_types = (types.Integer, types.NoneType)
for s in UNICODE_EXAMPLES:
for sub_str in ['', 'xx', s[:-2], s[3:], s]:
# test wrong start
for start, end in product([0.1, False], [-1, 1]):
with self.assertRaises(TypingError) as raises:
cfunc(s, sub_str, start, end)
msg = '"start" must be {}'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
# test wrong end
for start, end in product([-1, 1], [-0.1, True]):
with self.assertRaises(TypingError) as raises:
cfunc(s, sub_str, start, end)
msg = '"end" must be {}'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
def test_rfind_wrong_start_end_optional(self):
s = UNICODE_EXAMPLES[0]
sub_str = s[1:-1]
accepted_types = (types.Integer, types.NoneType)
msg = 'must be {}'.format(accepted_types)
def try_compile_wrong_start_optional(*args):
wrong_sig_optional = types.int64(types.unicode_type,
types.unicode_type,
types.Optional(types.float64),
types.Optional(types.intp))
njit([wrong_sig_optional])(rfind_with_start_end_usecase)
with self.assertRaises(TypingError) as raises:
try_compile_wrong_start_optional(s, sub_str, 0.1, 1)
self.assertIn(msg, str(raises.exception))
def try_compile_wrong_end_optional(*args):
wrong_sig_optional = types.int64(types.unicode_type,
types.unicode_type,
types.Optional(types.intp),
types.Optional(types.float64))
njit([wrong_sig_optional])(rfind_with_start_end_usecase)
with self.assertRaises(TypingError) as raises:
try_compile_wrong_end_optional(s, sub_str, 1, 0.1)
self.assertIn(msg, str(raises.exception))
def test_rindex(self):
pyfunc = rindex_usecase
cfunc = njit(pyfunc)
default_subs = [
(s, ['', s[:-2], s[3:], s]) for s in UNICODE_EXAMPLES
]
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L284-L308 # noqa: E501
cpython_subs = [
('abcdefghiabc', ['', 'def', 'abc']),
('a' + '\u0102' * 100, ['a']),
('a' + '\U00100304' * 100, ['a']),
('\u0102' + '\U00100304' * 100, ['\u0102']),
('_a' + '\u0102' * 100, ['_a']),
('_a' + '\U00100304' * 100, ['_a']),
('_\u0102' + '\U00100304' * 100, ['_\u0102'])
]
for s, subs in default_subs + cpython_subs:
for sub_str in subs:
msg = 'Results "{}".rindex("{}") must be equal'
self.assertEqual(pyfunc(s, sub_str), cfunc(s, sub_str),
msg=msg.format(s, sub_str))
def test_index(self):
pyfunc = index_usecase
cfunc = njit(pyfunc)
default_subs = [
(s, ['', s[:-2], s[3:], s]) for s in UNICODE_EXAMPLES
]
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L260-L282 # noqa: E501
cpython_subs = [
('abcdefghiabc', ['', 'def', 'abc']),
('\u0102' * 100 + 'a', ['a']),
('\U00100304' * 100 + 'a', ['a']),
('\U00100304' * 100 + '\u0102', ['\u0102']),
('\u0102' * 100 + 'a_', ['a_']),
('\U00100304' * 100 + 'a_', ['a_']),
('\U00100304' * 100 + '\u0102_', ['\u0102_'])
]
for s, subs in default_subs + cpython_subs:
for sub_str in subs:
msg = 'Results "{}".index("{}") must be equal'
self.assertEqual(pyfunc(s, sub_str), cfunc(s, sub_str),
msg=msg.format(s, sub_str))
def test_index_rindex_with_start_only(self):
pyfuncs = [index_with_start_only_usecase,
rindex_with_start_only_usecase]
messages = ['Results "{}".index("{}", {}) must be equal',
'Results "{}".rindex("{}", {}) must be equal']
unicode_examples = [
'ascii',
'12345',
'1234567890',
'¡Y tú quién te crees?',
'大处着眼,小处着手。',
]
for pyfunc, msg in zip(pyfuncs, messages):
cfunc = njit(pyfunc)
for s in unicode_examples:
l = len(s)
cases = [
('', list(range(-10, l + 1))),
(s[:-2], [0] + list(range(-10, 1 - l))),
(s[3:], list(range(4)) + list(range(-10, 4 - l))),
(s, [0] + list(range(-10, 1 - l))),
]
for sub_str, starts in cases:
for start in starts + [None]:
self.assertEqual(pyfunc(s, sub_str, start),
cfunc(s, sub_str, start),
msg=msg.format(s, sub_str, start))
def test_index_rindex_with_start_end(self):
pyfuncs = [index_with_start_end_usecase, rindex_with_start_end_usecase]
messages = ['Results of "{}".index("{}", {}, {}) must be equal',
'Results of "{}".rindex("{}", {}, {}) must be equal']
unicode_examples = [
'ascii',
'12345',
'1234567890',
'¡Y tú quién te crees?',
'大处着眼,小处着手。',
]
for pyfunc, msg in zip(pyfuncs, messages):
cfunc = njit(pyfunc)
for s in unicode_examples:
l = len(s)
cases = [
('', list(range(-10, l + 1)), list(range(l, 10))),
(s[:-2], [0] + list(range(-10, 1 - l)),
[-2, -1] + list(range(l - 2, 10))),
(s[3:], list(range(4)) + list(range(-10, -1)),
list(range(l, 10))),
(s, [0] + list(range(-10, 1 - l)), list(range(l, 10))),
]
for sub_str, starts, ends in cases:
for start, end in product(starts + [None], ends):
self.assertEqual(pyfunc(s, sub_str, start, end),
cfunc(s, sub_str, start, end),
msg=msg.format(s, sub_str, start, end))
def test_index_rindex_exception_substring_not_found(self):
self.disable_leak_check()
unicode_examples = [
'ascii',
'12345',
'1234567890',
'¡Y tú quién te crees?',
'大处着眼,小处着手。',
]
pyfuncs = [index_with_start_end_usecase, rindex_with_start_end_usecase]
for pyfunc in pyfuncs:
cfunc = njit(pyfunc)
for s in unicode_examples:
l = len(s)
cases = [
('', list(range(l + 1, 10)), [l]),
(s[:-2], [0], list(range(l - 2))),
(s[3:], list(range(4, 10)), [l]),
(s, [None], list(range(l))),
]
for sub_str, starts, ends in cases:
for start, end in product(starts, ends):
for func in [pyfunc, cfunc]:
with self.assertRaises(ValueError) as raises:
func(s, sub_str, start, end)
msg = 'substring not found'
self.assertIn(msg, str(raises.exception))
def test_index_rindex_exception_noninteger_start_end(self):
accepted = (types.Integer, types.NoneType)
pyfuncs = [index_with_start_end_usecase, rindex_with_start_end_usecase]
for pyfunc in pyfuncs:
cfunc = njit(pyfunc)
for start, end, name in [(0.1, 5, 'start'), (0, 0.5, 'end')]:
with self.assertRaises(TypingError) as raises:
cfunc('ascii', 'sci', start, end)
msg = '"{}" must be {}, not float'.format(name, accepted)
self.assertIn(msg, str(raises.exception))
def test_getitem(self):
    """Scalar indexing of unicode strings must match CPython for every
    in-range index, negative and non-negative alike."""
    pyfunc = getitem_usecase
    cfunc = njit(pyfunc)
    for example in UNICODE_EXAMPLES:
        n = len(example)
        for idx in range(-n, n):
            expected = pyfunc(example, idx)
            got = cfunc(example, idx)
            self.assertEqual(expected, got, "'%s'[%d]?" % (example, idx))
def test_getitem_scalar_kind(self):
# See issue #6135, make sure that getitem returns a char of the minimal
# kind required to represent the "got" item, this is done via the use
# of `hash` in the test function as it is sensitive to kind.
pyfunc = getitem_check_kind_usecase
cfunc = njit(pyfunc)
samples = ['a\u1234', '¡着']
for s in samples:
for i in range(-len(s), len(s)):
self.assertEqual(pyfunc(s, i),
cfunc(s, i),
"'%s'[%d]?" % (s, i))
def test_getitem_error(self):
self.disable_leak_check()
pyfunc = getitem_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
with self.assertRaises(IndexError) as raises:
pyfunc(s, len(s))
self.assertIn('string index out of range', str(raises.exception))
with self.assertRaises(IndexError) as raises:
cfunc(s, len(s))
self.assertIn('string index out of range', str(raises.exception))
def test_slice2(self):
    """Two-argument slicing (s[i:j]) must match CPython for all in-range
    start/stop combinations."""
    pyfunc = getitem_usecase
    cfunc = njit(pyfunc)
    for s in UNICODE_EXAMPLES:
        n = len(s)
        for start in range(-n, n):
            for stop in range(-n, n):
                sl = slice(start, stop)
                self.assertEqual(pyfunc(s, sl), cfunc(s, sl),
                                 "'%s'[%d:%d]?" % (s, start, stop))
def test_slice2_error(self):
pyfunc = getitem_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
for i in [-2, -1, len(s), len(s) + 1]:
for j in [-2, -1, len(s), len(s) + 1]:
sl = slice(i, j)
self.assertEqual(pyfunc(s, sl),
cfunc(s, sl),
"'%s'[%d:%d]?" % (s, i, j))
def test_getitem_slice2_kind(self):
# See issue #6135. Also see note in test_getitem_scalar_kind regarding
# testing.
pyfunc = getitem_check_kind_usecase
cfunc = njit(pyfunc)
samples = ['abc\u1234\u1234', '¡¡¡着着着']
for s in samples:
for i in [-2, -1, 0, 1, 2, len(s), len(s) + 1]:
for j in [-2, -1, 0, 1, 2, len(s), len(s) + 1]:
sl = slice(i, j)
self.assertEqual(pyfunc(s, sl),
cfunc(s, sl),
"'%s'[%d:%d]?" % (s, i, j))
def test_slice3(self):
pyfunc = getitem_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
for i in range(-len(s), len(s)):
for j in range(-len(s), len(s)):
for k in [-2, -1, 1, 2]:
sl = slice(i, j, k)
self.assertEqual(pyfunc(s, sl),
cfunc(s, sl),
"'%s'[%d:%d:%d]?" % (s, i, j, k))
def test_getitem_slice3_kind(self):
    # See issue #6135. Also see note in test_getitem_scalar_kind regarding
    # testing.
    pyfunc = getitem_check_kind_usecase
    cfunc = njit(pyfunc)
    # Samples mix ASCII, BMP (\u1234) and wide characters so that extended
    # slicing must select the minimal unicode kind for its result.
    # Fixed: 'a\u1234b\u1234c' previously lacked a trailing comma and was
    # implicitly concatenated with the next literal, silently merging two
    # intended samples into one (cf. test_getitem_slice2_kind, where the
    # analogous samples are separate items).
    samples = ['abc\u1234\u1234',
               'a\u1234b\u1234c',
               '¡¡¡着着着',
               '¡着¡着¡着',
               '着a着b着c',
               '¡着a¡着b¡着c',
               '¡着a着¡c',]
    for s in samples:
        for i in range(-len(s), len(s)):
            for j in range(-len(s), len(s)):
                for k in [-2, -1, 1, 2]:
                    sl = slice(i, j, k)
                    self.assertEqual(pyfunc(s, sl),
                                     cfunc(s, sl),
                                     "'%s'[%d:%d:%d]?" % (s, i, j, k))
def test_slice3_error(self):
pyfunc = getitem_usecase
cfunc = njit(pyfunc)
for s in UNICODE_EXAMPLES:
for i in [-2, -1, len(s), len(s) + 1]:
for j in [-2, -1, len(s), len(s) + 1]:
for k in [-2, -1, 1, 2]:
sl = slice(i, j, k)
self.assertEqual(pyfunc(s, sl),
cfunc(s, sl),
"'%s'[%d:%d:%d]?" % (s, i, j, k))
def test_slice_ascii_flag(self):
"""
Make sure ascii flag is False when ascii and non-ascii characters are
mixed in output of Unicode slicing.
"""
@njit
def f(s):
return s[::2]._is_ascii, s[1::2]._is_ascii
s = "¿abc¡Y tú, quién te cre\t\tes?"
self.assertEqual(f(s), (0, 1))
def test_zfill(self):
pyfunc = zfill_usecase
cfunc = njit(pyfunc)
ZFILL_INPUTS = [
'ascii',
'+ascii',
'-ascii',
'-asc ii-',
'12345',
'-12345',
'+12345',
'',
'¡Y tú crs?',
'🐍⚡',
'+🐍⚡',
'-🐍⚡',
'大眼,小手。',
'+大眼,小手。',
'-大眼,小手。',
]
with self.assertRaises(TypingError) as raises:
cfunc(ZFILL_INPUTS[0], 1.1)
self.assertIn('<width> must be an Integer', str(raises.exception))
for s in ZFILL_INPUTS:
for width in range(-3, 20):
self.assertEqual(pyfunc(s, width),
cfunc(s, width))
def test_concat(self, flags=no_pyobj_flags):
    """String concatenation with '+' must behave identically under njit
    and the interpreter for every pair of examples."""
    pyfunc = concat_usecase
    cfunc = njit(pyfunc)
    for left in UNICODE_EXAMPLES:
        for right in reversed(UNICODE_EXAMPLES):
            self.assertEqual(pyfunc(left, right),
                             cfunc(left, right),
                             "'%s' + '%s'?" % (left, right))
def test_repeat(self, flags=no_pyobj_flags):
pyfunc = repeat_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
for b in (-1, 0, 1, 2, 3, 4, 5, 7, 8, 15, 70):
self.assertEqual(pyfunc(a, b),
cfunc(a, b))
self.assertEqual(pyfunc(b, a),
cfunc(b, a))
def test_repeat_exception_float(self):
self.disable_leak_check()
cfunc = njit(repeat_usecase)
with self.assertRaises(TypingError) as raises:
cfunc('hi', 2.5)
self.assertIn(_header_lead + ' Function(<built-in function mul>)',
str(raises.exception))
def test_split_exception_empty_sep(self):
    # Both interpreted and compiled str.split must raise ValueError for an
    # empty separator, matching CPython's behaviour.
    self.disable_leak_check()
    pyfunc = split_usecase
    cfunc = njit(pyfunc)
    # Handle empty separator exception
    for func in [pyfunc, cfunc]:
        with self.assertRaises(ValueError) as raises:
            func('a', '')
        self.assertIn('empty separator', str(raises.exception))
def test_split_exception_noninteger_maxsplit(self):
pyfunc = split_with_maxsplit_usecase
cfunc = njit(pyfunc)
# Handle non-integer maxsplit exception
for sep in [' ', None]:
with self.assertRaises(TypingError) as raises:
cfunc('a', sep, 2.4)
self.assertIn('float64', str(raises.exception),
'non-integer maxsplit with sep = %s' % sep)
def test_split(self):
pyfunc = split_usecase
cfunc = njit(pyfunc)
CASES = [
(' a ', None),
('', '⚡'),
('abcabc', '⚡'),
('🐍⚡', '⚡'),
('🐍⚡🐍', '⚡'),
('abababa', 'a'),
('abababa', 'b'),
('abababa', 'c'),
('abababa', 'ab'),
('abababa', 'aba'),
]
for test_str, splitter in CASES:
self.assertEqual(pyfunc(test_str, splitter),
cfunc(test_str, splitter),
"'%s'.split('%s')?" % (test_str, splitter))
def test_split_with_maxsplit(self):
CASES = [
(' a ', None, 1),
('', '⚡', 1),
('abcabc', '⚡', 1),
('🐍⚡', '⚡', 1),
('🐍⚡🐍', '⚡', 1),
('abababa', 'a', 2),
('abababa', 'b', 1),
('abababa', 'c', 2),
('abababa', 'ab', 1),
('abababa', 'aba', 5),
]
for pyfunc, fmt_str in [(split_with_maxsplit_usecase,
"'%s'.split('%s', %d)?"),
(split_with_maxsplit_kwarg_usecase,
"'%s'.split('%s', maxsplit=%d)?")]:
cfunc = njit(pyfunc)
for test_str, splitter, maxsplit in CASES:
self.assertEqual(pyfunc(test_str, splitter, maxsplit),
cfunc(test_str, splitter, maxsplit),
fmt_str % (test_str, splitter, maxsplit))
def test_split_whitespace(self):
# explicit sep=None cases covered in test_split and
# test_split_with_maxsplit
pyfunc = split_whitespace_usecase
cfunc = njit(pyfunc)
# list copied from
# https://github.com/python/cpython/blob/master/Objects/unicodetype_db.h
all_whitespace = ''.join(map(chr, [
0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x001C, 0x001D, 0x001E,
0x001F, 0x0020, 0x0085, 0x00A0, 0x1680, 0x2000, 0x2001, 0x2002,
0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A,
0x2028, 0x2029, 0x202F, 0x205F, 0x3000
]))
CASES = [
'',
'abcabc',
'🐍 ⚡',
'🐍 ⚡ 🐍',
'🐍 ⚡ 🐍 ',
' 🐍 ⚡ 🐍',
' 🐍' + all_whitespace + '⚡ 🐍 ',
]
for test_str in CASES:
self.assertEqual(pyfunc(test_str),
cfunc(test_str),
"'%s'.split()?" % (test_str,))
def test_split_exception_invalid_keepends(self):
pyfunc = splitlines_with_keepends_usecase
cfunc = njit(pyfunc)
accepted_types = (types.Integer, int, types.Boolean, bool)
for ty, keepends in (('none', None), ('unicode_type', 'None')):
with self.assertRaises(TypingError) as raises:
cfunc('\n', keepends)
msg = '"keepends" must be {}, not {}'.format(accepted_types, ty)
self.assertIn(msg, str(raises.exception))
def test_splitlines(self):
    # str.splitlines() with default keepends must match CPython across the
    # full set of recognised line boundaries, including the non-ASCII ones
    # (\x85 NEL, \u2028 LS, \u2029 PS) and control separators \x1c-\x1e.
    pyfunc = splitlines_usecase
    cfunc = njit(pyfunc)
    cases = ['', '\n', 'abc\r\rabc\r\n', '🐍⚡\v', '\f🐍⚡\f\v\v🐍\x85',
             '\u2028aba\u2029baba', '\n\r\na\v\fb\x0b\x0cc\x1c\x1d\x1e']
    msg = 'Results of "{}".splitlines() must be equal'
    for s in cases:
        self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_splitlines_with_keepends(self):
pyfuncs = [
splitlines_with_keepends_usecase,
splitlines_with_keepends_kwarg_usecase
]
messages = [
'Results of "{}".splitlines({}) must be equal',
'Results of "{}".splitlines(keepends={}) must be equal'
]
cases = ['', '\n', 'abc\r\rabc\r\n', '🐍⚡\v', '\f🐍⚡\f\v\v🐍\x85',
'\u2028aba\u2029baba', '\n\r\na\v\fb\x0b\x0cc\x1c\x1d\x1e']
all_keepends = [True, False, 0, 1, -1, 100]
for pyfunc, msg in zip(pyfuncs, messages):
cfunc = njit(pyfunc)
for s, keepends in product(cases, all_keepends):
self.assertEqual(pyfunc(s, keepends), cfunc(s, keepends),
msg=msg.format(s, keepends))
def test_rsplit_exception_empty_sep(self):
self.disable_leak_check()
pyfunc = rsplit_usecase
cfunc = njit(pyfunc)
# Handle empty separator exception
for func in [pyfunc, cfunc]:
with self.assertRaises(ValueError) as raises:
func('a', '')
self.assertIn('empty separator', str(raises.exception))
def test_rsplit_exception_noninteger_maxsplit(self):
pyfunc = rsplit_with_maxsplit_usecase
cfunc = njit(pyfunc)
accepted_types = (types.Integer, int)
for sep in [' ', None]:
with self.assertRaises(TypingError) as raises:
cfunc('a', sep, 2.4)
msg = '"maxsplit" must be {}, not float'.format(accepted_types)
self.assertIn(msg, str(raises.exception))
def test_rsplit(self):
pyfunc = rsplit_usecase
cfunc = njit(pyfunc)
CASES = [
(' a ', None),
('', '⚡'),
('abcabc', '⚡'),
('🐍⚡', '⚡'),
('🐍⚡🐍', '⚡'),
('abababa', 'a'),
('abababa', 'b'),
('abababa', 'c'),
('abababa', 'ab'),
('abababa', 'aba'),
]
msg = 'Results of "{}".rsplit("{}") must be equal'
for s, sep in CASES:
self.assertEqual(pyfunc(s, sep), cfunc(s, sep),
msg=msg.format(s, sep))
def test_rsplit_with_maxsplit(self):
pyfuncs = [rsplit_with_maxsplit_usecase,
rsplit_with_maxsplit_kwarg_usecase]
CASES = [
(' a ', None, 1),
('', '⚡', 1),
('abcabc', '⚡', 1),
('🐍⚡', '⚡', 1),
('🐍⚡🐍', '⚡', 1),
('abababa', 'a', 2),
('abababa', 'b', 1),
('abababa', 'c', 2),
('abababa', 'ab', 1),
('abababa', 'aba', 5),
]
messages = [
'Results of "{}".rsplit("{}", {}) must be equal',
'Results of "{}".rsplit("{}", maxsplit={}) must be equal'
]
for pyfunc, msg in zip(pyfuncs, messages):
cfunc = njit(pyfunc)
for test_str, sep, maxsplit in CASES:
self.assertEqual(pyfunc(test_str, sep, maxsplit),
cfunc(test_str, sep, maxsplit),
msg=msg.format(test_str, sep, maxsplit))
def test_rsplit_whitespace(self):
pyfunc = rsplit_whitespace_usecase
cfunc = njit(pyfunc)
# list copied from
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Objects/unicodetype_db.h#L5996-L6031 # noqa: E501
all_whitespace = ''.join(map(chr, [
0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x001C, 0x001D, 0x001E,
0x001F, 0x0020, 0x0085, 0x00A0, 0x1680, 0x2000, 0x2001, 0x2002,
0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200A,
0x2028, 0x2029, 0x202F, 0x205F, 0x3000
]))
CASES = [
'',
'abcabc',
'🐍 ⚡',
'🐍 ⚡ 🐍',
'🐍 ⚡ 🐍 ',
' 🐍 ⚡ 🐍',
' 🐍' + all_whitespace + '⚡ 🐍 ',
]
msg = 'Results of "{}".rsplit() must be equal'
for s in CASES:
self.assertEqual(pyfunc(s), cfunc(s), msg.format(s))
def test_join_empty(self):
# Can't pass empty list to nopython mode, so we have to make a
# separate test case
pyfunc = join_empty_usecase
cfunc = njit(pyfunc)
CASES = [
'',
'🐍🐍🐍',
]
for sep in CASES:
self.assertEqual(pyfunc(sep),
cfunc(sep),
"'%s'.join([])?" % (sep,))
def test_join_non_string_exception(self):
# Verify that join of list of integers raises typing exception
pyfunc = join_usecase
cfunc = njit(pyfunc)
# Handle empty separator exception
with self.assertRaises(TypingError) as raises:
cfunc('', [1, 2, 3])
# This error message is obscure, but indicates the error was trapped
# in the typing of str.join()
# Feel free to change this as we update error messages.
exc_message = str(raises.exception)
self.assertIn(
"During: resolving callee type: BoundFunction",
exc_message,
)
# could be int32 or int64
self.assertIn("reflected list(int", exc_message)
def test_join(self):
pyfunc = join_usecase
cfunc = njit(pyfunc)
CASES = [
('', ['', '', '']),
('a', ['', '', '']),
('', ['a', 'bbbb', 'c']),
('🐍🐍🐍', ['⚡⚡'] * 5),
]
for sep, parts in CASES:
self.assertEqual(pyfunc(sep, parts),
cfunc(sep, parts),
"'%s'.join('%s')?" % (sep, parts))
def test_join_interleave_str(self):
# can pass a string as the parts iterable
pyfunc = join_usecase
cfunc = njit(pyfunc)
CASES = [
('abc', '123'),
('🐍🐍🐍', '⚡⚡'),
]
for sep, parts in CASES:
self.assertEqual(pyfunc(sep, parts),
cfunc(sep, parts),
"'%s'.join('%s')?" % (sep, parts))
def test_justification(self):
for pyfunc, case_name in [(center_usecase, 'center'),
(ljust_usecase, 'ljust'),
(rjust_usecase, 'rjust')]:
cfunc = njit(pyfunc)
with self.assertRaises(TypingError) as raises:
cfunc(UNICODE_EXAMPLES[0], 1.1)
self.assertIn('The width must be an Integer', str(raises.exception))
for s in UNICODE_EXAMPLES:
for width in range(-3, 20):
self.assertEqual(pyfunc(s, width),
cfunc(s, width),
"'%s'.%s(%d)?" % (s, case_name, width))
def test_justification_fillchar(self):
for pyfunc, case_name in [(center_usecase_fillchar, 'center'),
(ljust_usecase_fillchar, 'ljust'),
(rjust_usecase_fillchar, 'rjust')]:
cfunc = njit(pyfunc)
# allowed fillchar cases
for fillchar in [' ', '+', 'ú', '处']:
with self.assertRaises(TypingError) as raises:
cfunc(UNICODE_EXAMPLES[0], 1.1, fillchar)
self.assertIn('The width must be an Integer',
str(raises.exception))
for s in UNICODE_EXAMPLES:
for width in range(-3, 20):
self.assertEqual(pyfunc(s, width, fillchar),
cfunc(s, width, fillchar),
"'%s'.%s(%d, '%s')?" % (s, case_name,
width,
fillchar))
def test_justification_fillchar_exception(self):
self.disable_leak_check()
for pyfunc in [center_usecase_fillchar,
ljust_usecase_fillchar,
rjust_usecase_fillchar]:
cfunc = njit(pyfunc)
# disallowed fillchar cases
for fillchar in ['', '+0', 'quién', '处着']:
with self.assertRaises(ValueError) as raises:
cfunc(UNICODE_EXAMPLES[0], 20, fillchar)
self.assertIn('The fill character must be exactly one',
str(raises.exception))
# forbid fillchar cases with different types
for fillchar in [1, 1.1]:
with self.assertRaises(TypingError) as raises:
cfunc(UNICODE_EXAMPLES[0], 20, fillchar)
self.assertIn('The fillchar must be a UnicodeType',
str(raises.exception))
def test_inplace_concat(self, flags=no_pyobj_flags):
pyfunc = inplace_concat_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
for b in UNICODE_EXAMPLES[::-1]:
self.assertEqual(pyfunc(a, b),
cfunc(a, b),
"'%s' + '%s'?" % (a, b))
def test_isidentifier(self):
def pyfunc(s):
return s.isidentifier()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L695-L708 # noqa: E501
cpython = ['a', 'Z', '_', 'b0', 'bc', 'b_', 'µ',
'𝔘𝔫𝔦𝔠𝔬𝔡𝔢', ' ', '[', '©', '0']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L742-L749 # noqa: E501
cpython_extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a']
msg = 'Results of "{}".isidentifier() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_strip(self):
STRIP_CASES = [
('ass cii', 'ai'),
('ass cii', None),
('asscii', 'ai '),
('asscii ', 'ai '),
(' asscii ', 'ai '),
(' asscii ', 'asci '),
(' asscii ', 's'),
(' ', ' '),
('', ' '),
('', ''),
('', None),
(' ', None),
(' asscii ', 'ai '),
(' asscii ', ''),
(' asscii ', None),
('tú quién te crees?', 'étú? '),
(' tú quién te crees? ', 'étú? '),
(' tú qrees? ', ''),
(' tú quién te crees? ', None),
('大处 着眼,小处着手。大大大处', '大处'),
(' 大处大处 ', ''),
('\t\nabcd\t', '\ta'),
(' 大处大处 ', None),
('\t abcd \t', None),
('\n abcd \n', None),
('\r abcd \r', None),
('\x0b abcd \x0b', None),
('\x0c abcd \x0c', None),
('\u2029abcd\u205F', None),
('\u0085abcd\u2009', None)
]
# form with no parameter
for pyfunc, case_name in [(strip_usecase, 'strip'),
(lstrip_usecase, 'lstrip'),
(rstrip_usecase, 'rstrip')]:
cfunc = njit(pyfunc)
for string, chars in STRIP_CASES:
self.assertEqual(pyfunc(string),
cfunc(string),
"'%s'.%s()?" % (string, case_name))
# parametrized form
for pyfunc, case_name in [(strip_usecase_chars, 'strip'),
(lstrip_usecase_chars, 'lstrip'),
(rstrip_usecase_chars, 'rstrip')]:
cfunc = njit(pyfunc)
sig1 = types.unicode_type(types.unicode_type,
types.Optional(types.unicode_type))
cfunc_optional = njit([sig1])(pyfunc)
def try_compile_bad_optional(*args):
bad = types.unicode_type(types.unicode_type,
types.Optional(types.float64))
njit([bad])(pyfunc)
for fn in cfunc, try_compile_bad_optional:
with self.assertRaises(TypingError) as raises:
fn('tú quis?', 1.1)
self.assertIn('The arg must be a UnicodeType or None',
str(raises.exception))
for fn in cfunc, cfunc_optional:
for string, chars in STRIP_CASES:
self.assertEqual(pyfunc(string, chars),
fn(string, chars),
"'%s'.%s('%s')?" % (string, case_name,
chars))
def test_isspace(self):
def pyfunc(s):
return s.isspace()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L613-L621 # noqa: E501
cpython = ['\u2000', '\u200a', '\u2014', '\U00010401', '\U00010427',
'\U00010429', '\U0001044E', '\U0001F40D', '\U0001F46F']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L742-L749 # noqa: E501
cpython_extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a']
msg = 'Results of "{}".isspace() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_istitle(self):
    # str.istitle() under njit must agree with CPython for general examples,
    # their title-cased forms, and edge cases (digits, punctuation,
    # non-BMP cased letters, titlecase-only characters such as 'ῼ').
    pyfunc = istitle_usecase
    cfunc = njit(pyfunc)
    error_msg = "'{0}'.py_istitle() = {1}\n'{0}'.c_istitle() = {2}"
    unicode_title = [x.title() for x in UNICODE_EXAMPLES]
    special = [
        '',
        ' ',
        ' AA ',
        ' Ab ',
        '1',
        'A123',
        'A12Bcd',
        '+abA',
        '12Abc',
        'A12abc',
        # Fixed: a missing comma after this entry implicitly concatenated it
        # with the next literal, so neither intended case was tested alone.
        '%^Abc 5 $% Def',
        '𐐁𐐩',
        '𐐧𐑎',
        '𐐩',
        '𐑎',
        '🐍 Is',
        '🐍 NOT',
        '👯Is',
        'ῼ',
        'Greek ῼitlecases ...'
    ]
    ISTITLE_EXAMPLES = UNICODE_EXAMPLES + unicode_title + special
    for s in ISTITLE_EXAMPLES:
        py_result = pyfunc(s)
        c_result = cfunc(s)
        self.assertEqual(py_result, c_result,
                         error_msg.format(s, py_result, c_result))
def test_isprintable(self):
def pyfunc(s):
return s.isprintable()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L710-L723 # noqa: E501
cpython = ['', ' ', 'abcdefg', 'abcdefg\n', '\u0374', '\u0378',
'\ud800', '\U0001F46F', '\U000E0020']
msg = 'Results of "{}".isprintable() must be equal'
for s in UNICODE_EXAMPLES + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_pointless_slice(self, flags=no_pyobj_flags):
def pyfunc(a):
return a[:]
cfunc = njit(pyfunc)
args = ['a']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_walk_backwards(self, flags=no_pyobj_flags):
def pyfunc(a):
return a[::-1]
cfunc = njit(pyfunc)
args = ['a']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_stride_slice(self, flags=no_pyobj_flags):
def pyfunc(a):
return a[::2]
cfunc = njit(pyfunc)
args = ['a']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_basic_lt(self, flags=no_pyobj_flags):
def pyfunc(a, b):
return a < b
cfunc = njit(pyfunc)
args = ['ab', 'b']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_basic_gt(self, flags=no_pyobj_flags):
def pyfunc(a, b):
return a > b
cfunc = njit(pyfunc)
args = ['ab', 'b']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_comparison(self):
def pyfunc(option, x, y):
if option == '==':
return x == y
elif option == '!=':
return x != y
elif option == '<':
return x < y
elif option == '>':
return x > y
elif option == '<=':
return x <= y
elif option == '>=':
return x >= y
else:
return None
cfunc = njit(pyfunc)
for x, y in permutations(UNICODE_ORDERING_EXAMPLES, r=2):
for cmpop in ['==', '!=', '<', '>', '<=', '>=', '']:
args = [cmpop, x, y]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_literal_concat(self):
def pyfunc(x):
abc = 'abc'
if len(x):
return abc + 'b123' + x + 'IO'
else:
return x + abc + '123' + x
cfunc = njit(pyfunc)
args = ['x']
self.assertEqual(pyfunc(*args), cfunc(*args))
args = ['']
self.assertEqual(pyfunc(*args), cfunc(*args))
def test_literal_comparison(self):
def pyfunc(option):
x = 'a123'
y = 'aa12'
if option == '==':
return x == y
elif option == '!=':
return x != y
elif option == '<':
return x < y
elif option == '>':
return x > y
elif option == '<=':
return x <= y
elif option == '>=':
return x >= y
else:
return None
cfunc = njit(pyfunc)
for cmpop in ['==', '!=', '<', '>', '<=', '>=', '']:
args = [cmpop]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_literal_len(self):
def pyfunc():
return len('abc')
cfunc = njit(pyfunc)
self.assertEqual(pyfunc(), cfunc())
def test_literal_getitem(self):
def pyfunc(which):
return 'abc'[which]
cfunc = njit(pyfunc)
for a in [-1, 0, 1, slice(1, None), slice(None, -1)]:
args = [a]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_literal_in(self):
def pyfunc(x):
return x in '9876zabiuh'
cfunc = njit(pyfunc)
for a in ['a', '9', '1', '', '8uha', '987']:
args = [a]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_literal_xyzwith(self):
def pyfunc(x, y):
return 'abc'.startswith(x), 'cde'.endswith(y)
cfunc = njit(pyfunc)
for args in permutations('abcdefg', r=2):
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_literal_find(self):
def pyfunc(x):
return 'abc'.find(x), x.find('a')
cfunc = njit(pyfunc)
for a in ['ab']:
args = [a]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_not(self):
    """Boolean negation of a string (`not s`) must match CPython:
    True only for the empty string."""
    def pyfunc(x):
        return not x
    cfunc = njit(pyfunc)
    for sample in UNICODE_EXAMPLES:
        self.assertEqual(pyfunc(sample), cfunc(sample),
                         msg='failed on {}'.format([sample]))
def test_capitalize(self):
def pyfunc(x):
return x.capitalize()
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L800-L815 # noqa: E501
cpython = ['\U0001044F', '\U0001044F\U0001044F', '\U00010427\U0001044F',
'\U0001044F\U00010427', 'X\U00010427x\U0001044F', 'h\u0130',
'\u1fd2\u0130', 'finnish', 'A\u0345\u03a3']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L926 # noqa: E501
cpython_extras = ['\U00010000\U00100000']
msg = 'Results of "{}".capitalize() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isupper(self):
def pyfunc(x):
return x.isupper()
cfunc = njit(pyfunc)
uppers = [x.upper() for x in UNICODE_EXAMPLES]
extras = ["AA12A", "aa12a", "大AA12A", "大aa12a", "AAADŽA", "A 1 1 大"]
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L585-L599 # noqa: E501
cpython = ['\u2167', '\u2177', '\U00010401', '\U00010427', '\U00010429',
'\U0001044E', '\U0001F40D', '\U0001F46F']
fourxcpy = [x * 4 for x in cpython]
for a in UNICODE_EXAMPLES + uppers + extras + cpython + fourxcpy:
args = [a]
self.assertEqual(pyfunc(*args), cfunc(*args),
msg='failed on {}'.format(args))
def test_upper(self):
    """str.upper() compiled with njit must agree with the interpreter
    for every example string."""
    def pyfunc(x):
        return x.upper()
    cfunc = njit(pyfunc)
    for sample in UNICODE_EXAMPLES:
        expected = pyfunc(sample)
        got = cfunc(sample)
        self.assertEqual(expected, got,
                         msg='failed on {}'.format([sample]))
def test_casefold(self):
def pyfunc(x):
return x.casefold()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L774-L781 # noqa: E501
cpython = ['hello', 'hELlo', 'ß', 'fi', '\u03a3',
'A\u0345\u03a3', '\u00b5']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L924 # noqa: E501
cpython_extras = ['\U00010000\U00100000']
msg = 'Results of "{}".casefold() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isalpha(self):
def pyfunc(x):
return x.isalpha()
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L630-L640 # noqa: E501
cpython = ['\u1FFc', '\U00010401', '\U00010427', '\U00010429',
'\U0001044E', '\U0001F40D', '\U0001F46F']
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L738-L745 # noqa: E501
extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a']
msg = 'Results of "{}".isalpha() must be equal'
for s in UNICODE_EXAMPLES + [''] + extras + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
@unittest.skipUnless(_py37_or_later,
'isascii method requires Python 3.7 or later')
def test_isascii(self):
def pyfunc(x):
return x.isascii()
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/865c3b257fe38154a4320c7ee6afb416f665b9c2/Lib/test/string_tests.py#L913-L926 # noqa: E501
cpython = ['', '\x00', '\x7f', '\x00\x7f', '\x80', '\xe9', ' ']
msg = 'Results of "{}".isascii() must be equal'
for s in UNICODE_EXAMPLES + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_title(self):
pyfunc = title
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L813-L828 # noqa: E501
cpython = ['\U0001044F', '\U0001044F\U0001044F',
'\U0001044F\U0001044F \U0001044F\U0001044F',
'\U00010427\U0001044F \U00010427\U0001044F',
'\U0001044F\U00010427 \U0001044F\U00010427',
'X\U00010427x\U0001044F X\U00010427x\U0001044F',
'fiNNISH', 'A\u03a3 \u1fa1xy', 'A\u03a3A']
msg = 'Results of "{}".title() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_swapcase(self):
def pyfunc(x):
return x.swapcase()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L834-L858 # noqa: E501
cpython = ['\U0001044F', '\U00010427', '\U0001044F\U0001044F',
'\U00010427\U0001044F', '\U0001044F\U00010427',
'X\U00010427x\U0001044F', 'fi', '\u0130', '\u03a3',
'\u0345\u03a3', 'A\u0345\u03a3', 'A\u0345\u03a3a',
'A\u0345\u03a3', 'A\u03a3\u0345', '\u03a3\u0345 ',
'\u03a3', 'ß', '\u1fd2']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L928 # noqa: E501
cpython_extras = ['\U00010000\U00100000']
msg = 'Results of "{}".swapcase() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_islower(self):
pyfunc = islower_usecase
cfunc = njit(pyfunc)
lowers = [x.lower() for x in UNICODE_EXAMPLES]
extras = ['AA12A', 'aa12a', '大AA12A', '大aa12a', 'AAADŽA', 'A 1 1 大']
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L586-L600 # noqa: E501
cpython = ['\u2167', '\u2177', '\U00010401', '\U00010427',
'\U00010429', '\U0001044E', '\U0001F40D', '\U0001F46F']
cpython += [x * 4 for x in cpython]
msg = 'Results of "{}".islower() must be equal'
for s in UNICODE_EXAMPLES + lowers + [''] + extras + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isalnum(self):
def pyfunc(x):
return x.isalnum()
cfunc = njit(pyfunc)
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L624-L628 # noqa: E501
cpython = ['\U00010401', '\U00010427', '\U00010429', '\U0001044E',
'\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L738-L745 # noqa: E501
extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a']
msg = 'Results of "{}".isalnum() must be equal'
for s in UNICODE_EXAMPLES + [''] + extras + cpython:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_lower(self):
pyfunc = lower_usecase
cfunc = njit(pyfunc)
extras = ['AA12A', 'aa12a', '大AA12A', '大aa12a', 'AAADŽA', 'A 1 1 大']
# Samples taken from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L748-L758 # noqa: E501
cpython = ['\U00010401', '\U00010427', '\U0001044E', '\U0001F46F',
'\U00010427\U00010427', '\U00010427\U0001044F',
'X\U00010427x\U0001044F', '\u0130']
# special cases for sigma from CPython testing:
# https://github.com/python/cpython/blob/201c8f79450628241574fba940e08107178dc3a5/Lib/test/test_unicode.py#L759-L768 # noqa: E501
sigma = ['\u03a3', '\u0345\u03a3', 'A\u0345\u03a3', 'A\u0345\u03a3a',
'\u03a3\u0345 ', '\U0008fffe', '\u2177']
extra_sigma = 'A\u03a3\u03a2'
sigma.append(extra_sigma)
msg = 'Results of "{}".lower() must be equal'
for s in UNICODE_EXAMPLES + [''] + extras + cpython + sigma:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isnumeric(self):
def pyfunc(x):
return x.isnumeric()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L676-L693 # noqa: E501
cpython = ['', 'a', '0', '\u2460', '\xbc', '\u0660', '0123456789',
'0123456789a', '\U00010401', '\U00010427', '\U00010429',
'\U0001044E', '\U0001F40D', '\U0001F46F', '\U00011065',
'\U0001D7F6', '\U00011066', '\U000104A0', '\U0001F107']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L742-L749 # noqa: E501
cpython_extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800', 'a\uD800b\uDFFFa',
'a\uDFFFb\uD800a']
msg = 'Results of "{}".isnumeric() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isdigit(self):
def pyfunc(x):
return x.isdigit()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L664-L674 # noqa: E501
cpython = ['\u2460', '\xbc', '\u0660', '\U00010401', '\U00010427',
'\U00010429', '\U0001044E', '\U0001F40D', '\U0001F46F',
'\U00011065', '\U0001D7F6', '\U00011066', '\U000104A0',
'\U0001F107']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L742-L749 # noqa: E501
cpython_extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800',
'a\uD800b\uDFFFa', 'a\uDFFFb\uD800a']
msg = 'Results of "{}".isdigit() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_isdecimal(self):
def pyfunc(x):
return x.isdecimal()
cfunc = njit(pyfunc)
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L646-L662 # noqa: E501
cpython = ['', 'a', '0', '\u2460', '\xbc', '\u0660', '0123456789',
'0123456789a', '\U00010401', '\U00010427', '\U00010429',
'\U0001044E', '\U0001F40D', '\U0001F46F', '\U00011065',
'\U0001F107', '\U0001D7F6', '\U00011066', '\U000104A0']
# https://github.com/python/cpython/blob/1d4b6ba19466aba0eb91c4ba01ba509acf18c723/Lib/test/test_unicode.py#L742-L749 # noqa: E501
cpython_extras = ['\uD800', '\uDFFF', '\uD800\uD800', '\uDFFF\uDFFF',
'a\uD800b\uDFFF', 'a\uDFFFb\uD800', 'a\uD800b\uDFFFa',
'a\uDFFFb\uD800a']
msg = 'Results of "{}".isdecimal() must be equal'
for s in UNICODE_EXAMPLES + [''] + cpython + cpython_extras:
self.assertEqual(pyfunc(s), cfunc(s), msg=msg.format(s))
def test_replace(self):
pyfunc = replace_usecase
cfunc = njit(pyfunc)
CASES = [
('abc', '', 'A'),
('', '⚡', 'A'),
('abcabc', '⚡', 'A'),
('🐍⚡', '⚡', 'A'),
('🐍⚡🐍', '⚡', 'A'),
('abababa', 'a', 'A'),
('abababa', 'b', 'A'),
('abababa', 'c', 'A'),
('abababa', 'ab', 'A'),
('abababa', 'aba', 'A'),
]
for test_str, old_str, new_str in CASES:
self.assertEqual(pyfunc(test_str, old_str, new_str),
cfunc(test_str, old_str, new_str),
"'%s'.replace('%s', '%s')?" %
(test_str, old_str, new_str))
def test_replace_with_count(self):
pyfunc = replace_with_count_usecase
cfunc = njit(pyfunc)
CASES = [
('abc', '', 'A'),
('', '⚡', 'A'),
('abcabc', '⚡', 'A'),
('🐍⚡', '⚡', 'A'),
('🐍⚡🐍', '⚡', 'A'),
('abababa', 'a', 'A'),
('abababa', 'b', 'A'),
('abababa', 'c', 'A'),
('abababa', 'ab', 'A'),
('abababa', 'aba', 'A'),
]
count_test = [-1, 1, 0, 5]
for test_str, old_str, new_str in CASES:
for count in count_test:
self.assertEqual(pyfunc(test_str, old_str, new_str, count),
cfunc(test_str, old_str, new_str, count),
"'%s'.replace('%s', '%s', '%s')?" %
(test_str, old_str, new_str, count))
def test_replace_unsupported(self):
def pyfunc(s, x, y, count):
return s.replace(x, y, count)
cfunc = njit(pyfunc)
with self.assertRaises(TypingError) as raises:
cfunc('ababababab', 'ba', 'qqq', 3.5)
msg = 'Unsupported parameters. The parametrs must be Integer.'
self.assertIn(msg, str(raises.exception))
with self.assertRaises(TypingError) as raises:
cfunc('ababababab', 0, 'qqq', 3)
msg = 'The object must be a UnicodeType.'
self.assertIn(msg, str(raises.exception))
with self.assertRaises(TypingError) as raises:
cfunc('ababababab', 'ba', 0, 3)
msg = 'The object must be a UnicodeType.'
self.assertIn(msg, str(raises.exception))
class TestUnicodeInTuple(BaseTest):
def test_const_unicode_in_tuple(self):
# Issue 3673
@njit
def f():
return ('aa',) < ('bb',)
self.assertEqual(f.py_func(), f())
@njit
def f():
return ('cc',) < ('bb',)
self.assertEqual(f.py_func(), f())
def test_const_unicode_in_hetero_tuple(self):
@njit
def f():
return ('aa', 1) < ('bb', 1)
self.assertEqual(f.py_func(), f())
@njit
def f():
return ('aa', 1) < ('aa', 2)
self.assertEqual(f.py_func(), f())
def test_ascii_flag_unbox(self):
@njit
def f(s):
return s._is_ascii
for s in UNICODE_EXAMPLES:
self.assertEqual(f(s), isascii(s))
def test_ascii_flag_join(self):
@njit
def f():
s1 = 'abc'
s2 = '123'
s3 = '🐍⚡'
s4 = '大处着眼,小处着手。'
return (",".join([s1, s2])._is_ascii,
"🐍⚡".join([s1, s2])._is_ascii,
",".join([s1, s3])._is_ascii,
",".join([s3, s4])._is_ascii)
self.assertEqual(f(), (1, 0, 0, 0))
def test_ascii_flag_getitem(self):
@njit
def f():
s1 = 'abc123'
s2 = '🐍⚡🐍⚡🐍⚡'
return (s1[0]._is_ascii, s1[2:]._is_ascii, s2[0]._is_ascii,
s2[2:]._is_ascii)
self.assertEqual(f(), (1, 1, 0, 0))
def test_ascii_flag_add_mul(self):
@njit
def f():
s1 = 'abc'
s2 = '123'
s3 = '🐍⚡'
s4 = '大处着眼,小处着手。'
return ((s1 + s2)._is_ascii,
(s1 + s3)._is_ascii,
(s3 + s4)._is_ascii,
(s1 * 2)._is_ascii,
(s3 * 2)._is_ascii)
self.assertEqual(f(), (1, 0, 0, 1, 0))
class TestUnicodeIteration(BaseTest):
def test_unicode_iter(self):
pyfunc = iter_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
self.assertPreciseEqual(pyfunc(a), cfunc(a))
def test_unicode_literal_iter(self):
pyfunc = literal_iter_usecase
cfunc = njit(pyfunc)
self.assertPreciseEqual(pyfunc(), cfunc())
def test_unicode_enumerate_iter(self):
pyfunc = enumerated_iter_usecase
cfunc = njit(pyfunc)
for a in UNICODE_EXAMPLES:
self.assertPreciseEqual(pyfunc(a), cfunc(a))
def test_unicode_stopiteration_iter(self):
self.disable_leak_check()
pyfunc = iter_stopiteration_usecase
cfunc = njit(pyfunc)
for f in (pyfunc, cfunc):
for a in UNICODE_EXAMPLES:
with self.assertRaises(StopIteration):
f(a)
def test_unicode_literal_stopiteration_iter(self):
pyfunc = literal_iter_stopiteration_usecase
cfunc = njit(pyfunc)
for f in (pyfunc, cfunc):
with self.assertRaises(StopIteration):
f()
class TestUnicodeAuxillary(BaseTest):
def test_ord(self):
pyfunc = ord_usecase
cfunc = njit(pyfunc)
for ex in UNICODE_EXAMPLES:
for a in ex:
self.assertPreciseEqual(pyfunc(a), cfunc(a))
def test_ord_invalid(self):
self.disable_leak_check()
pyfunc = ord_usecase
cfunc = njit(pyfunc)
# wrong number of chars
for func in (pyfunc, cfunc):
for ch in ('', 'abc'):
with self.assertRaises(TypeError) as raises:
func(ch)
self.assertIn('ord() expected a character',
str(raises.exception))
# wrong type
with self.assertRaises(TypingError) as raises:
cfunc(1.23)
self.assertIn(_header_lead, str(raises.exception))
def test_chr(self):
pyfunc = chr_usecase
cfunc = njit(pyfunc)
for ex in UNICODE_EXAMPLES:
for x in ex:
a = ord(x)
self.assertPreciseEqual(pyfunc(a), cfunc(a))
# test upper/lower bounds
for a in (0x0, _MAX_UNICODE):
self.assertPreciseEqual(pyfunc(a), cfunc(a))
def test_chr_invalid(self):
pyfunc = chr_usecase
cfunc = njit(pyfunc)
# value negative/>_MAX_UNICODE
for func in (pyfunc, cfunc):
for v in (-2, _MAX_UNICODE + 1):
with self.assertRaises(ValueError) as raises:
func(v)
self.assertIn("chr() arg not in range", str(raises.exception))
# wrong type
with self.assertRaises(TypingError) as raises:
cfunc('abc')
self.assertIn(_header_lead, str(raises.exception))
def test_unicode_type_mro(self):
# see issue #5635
def bar(x):
return True
@overload(bar)
def ol_bar(x):
ok = False
if isinstance(x, types.UnicodeType):
if isinstance(x, types.Hashable):
ok = True
return lambda x: ok
@njit
def foo(strinst):
return bar(strinst)
inst = "abc"
self.assertEqual(foo.py_func(inst), foo(inst))
self.assertIn(types.Hashable, types.unicode_type.__class__.__mro__)
def test_f_strings(self):
"""test f-string support, which requires bytecode handling
"""
# requires formatting (FORMAT_VALUE) and concatenation (BUILD_STRINGS)
def impl1(a):
return f"AA_{a+3}_B"
# does not require concatenation
def impl2(a):
return f"{a+2}"
# no expression
def impl3(a):
return f"ABC_{a}"
# format spec not allowed
def impl4(a):
return f"ABC_{a:0}"
# corner case: empty string
def impl5():
return f"" # noqa: F541
self.assertEqual(impl1(3), njit(impl1)(3))
self.assertEqual(impl2(2), njit(impl2)(2))
# string input
self.assertEqual(impl3("DE"), njit(impl3)("DE"))
# check error when input type doesn't have str() implementation
with self.assertRaises(TypingError) as raises:
njit(impl3)(["A", "B"])
msg = "No implementation of function Function(<class 'str'>)"
self.assertIn(msg, str(raises.exception))
# check error when format spec provided
with self.assertRaises(UnsupportedError) as raises:
njit(impl4)(["A", "B"])
msg = "format spec in f-strings not supported yet"
self.assertIn(msg, str(raises.exception))
self.assertEqual(impl5(), njit(impl5)())
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>x082.py<|end_file_name|><|fim▁begin|>data = (
'Yao ', # 0x00
'Yu ', # 0x01
'Chong ', # 0x02
'Xi ', # 0x03
'Xi ', # 0x04
'Jiu ', # 0x05
'Yu ', # 0x06
'Yu ', # 0x07
'Xing ', # 0x08
'Ju ', # 0x09
'Jiu ', # 0x0a
'Xin ', # 0x0b
'She ', # 0x0c
'She ', # 0x0d
'Yadoru ', # 0x0e
'Jiu ', # 0x0f
'Shi ', # 0x10
'Tan ', # 0x11
'Shu ', # 0x12
'Shi ', # 0x13
'Tian ', # 0x14
'Dan ', # 0x15
'Pu ', # 0x16
'Pu ', # 0x17
'Guan ', # 0x18
'Hua ', # 0x19
'Tan ', # 0x1a
'Chuan ', # 0x1b
'Shun ', # 0x1c
'Xia ', # 0x1d
'Wu ', # 0x1e
'Zhou ', # 0x1f
'Dao ', # 0x20
'Gang ', # 0x21
'Shan ', # 0x22
'Yi ', # 0x23
'[?] ', # 0x24
'Pa ', # 0x25
'Tai ', # 0x26
'Fan ', # 0x27
'Ban ', # 0x28
'Chuan ', # 0x29
'Hang ', # 0x2a
'Fang ', # 0x2b
'Ban ', # 0x2c
'Que ', # 0x2d
'Hesaki ', # 0x2e
'Zhong ', # 0x2f
'Jian ', # 0x30
'Cang ', # 0x31
'Ling ', # 0x32
'Zhu ', # 0x33
'Ze ', # 0x34
'Duo ', # 0x35
'Bo ', # 0x36
'Xian ', # 0x37
'Ge ', # 0x38
'Chuan ', # 0x39
'Jia ', # 0x3a
'Lu ', # 0x3b
'Hong ', # 0x3c
'Pang ', # 0x3d
'Xi ', # 0x3e
'[?] ', # 0x3f
'Fu ', # 0x40
'Zao ', # 0x41
'Feng ', # 0x42
'Li ', # 0x43
'Shao ', # 0x44
'Yu ', # 0x45
'Lang ', # 0x46
'Ting ', # 0x47
'[?] ', # 0x48
'Wei ', # 0x49
'Bo ', # 0x4a
'Meng ', # 0x4b
'Nian ', # 0x4c
'Ju ', # 0x4d
'Huang ', # 0x4e
'Shou ', # 0x4f
'Zong ', # 0x50
'Bian ', # 0x51
'Mao ', # 0x52
'Die ', # 0x53
'[?] ', # 0x54
'Bang ', # 0x55
'Cha ', # 0x56
'Yi ', # 0x57
'Sao ', # 0x58
'Cang ', # 0x59
'Cao ', # 0x5a
'Lou ', # 0x5b
'Dai ', # 0x5c
'Sori ', # 0x5d
'Yao ', # 0x5e
'Tong ', # 0x5f
'Yofune ', # 0x60
'Dang ', # 0x61
'Tan ', # 0x62
'Lu ', # 0x63
'Yi ', # 0x64
'Jie ', # 0x65
'Jian ', # 0x66
'Huo ', # 0x67
'Meng ', # 0x68
'Qi ', # 0x69
'Lu ', # 0x6a
'Lu ', # 0x6b
'Chan ', # 0x6c
'Shuang ', # 0x6d
'Gen ', # 0x6e
'Liang ', # 0x6f
'Jian ', # 0x70
'Jian ', # 0x71
'Se ', # 0x72
'Yan ', # 0x73
'Fu ', # 0x74
'Ping ', # 0x75
'Yan ', # 0x76
'Yan ', # 0x77
'Cao ', # 0x78
'Cao ', # 0x79
'Yi ', # 0x7a
'Le ', # 0x7b
'Ting ', # 0x7c
'Qiu ', # 0x7d
'Ai ', # 0x7e
'Nai ', # 0x7f
'Tiao ', # 0x80
'Jiao ', # 0x81
'Jie ', # 0x82
'Peng ', # 0x83
'Wan ', # 0x84
'Yi ', # 0x85
'Chai ', # 0x86
'Mian ', # 0x87
'Mie ', # 0x88
'Gan ', # 0x89
'Qian ', # 0x8a
'Yu ', # 0x8b
'Yu ', # 0x8c
'Shuo ', # 0x8d
'Qiong ', # 0x8e
'Tu ', # 0x8f
'Xia ', # 0x90
'Qi ', # 0x91
'Mang ', # 0x92
'Zi ', # 0x93
'Hui ', # 0x94
'Sui ', # 0x95
'Zhi ', # 0x96
'Xiang ', # 0x97
'Bi ', # 0x98
'Fu ', # 0x99
'Tun ', # 0x9a
'Wei ', # 0x9b
'Wu ', # 0x9c
'Zhi ', # 0x9d
'Qi ', # 0x9e
'Shan ', # 0x9f
'Wen ', # 0xa0
'Qian ', # 0xa1
'Ren ', # 0xa2
'Fou ', # 0xa3
'Kou ', # 0xa4
'Jie ', # 0xa5
'Lu ', # 0xa6
'Xu ', # 0xa7
'Ji ', # 0xa8
'Qin ', # 0xa9
'Qi ', # 0xaa
'Yuan ', # 0xab
'Fen ', # 0xac
'Ba ', # 0xad
'Rui ', # 0xae
'Xin ', # 0xaf
'Ji ', # 0xb0
'Hua ', # 0xb1
'Hua ', # 0xb2
'Fang ', # 0xb3
'Wu ', # 0xb4
'Jue ', # 0xb5
'Gou ', # 0xb6
'Zhi ', # 0xb7
'Yun ', # 0xb8
'Qin ', # 0xb9
'Ao ', # 0xba
'Chu ', # 0xbb
'Mao ', # 0xbc
'Ya ', # 0xbd
'Fei ', # 0xbe
'Reng ', # 0xbf
'Hang ', # 0xc0
'Cong ', # 0xc1
'Yin ', # 0xc2
'You ', # 0xc3
'Bian ', # 0xc4
'Yi ', # 0xc5
'Susa ', # 0xc6
'Wei ', # 0xc7
'Li ', # 0xc8
'Pi ', # 0xc9
'E ', # 0xca
'Xian ', # 0xcb
'Chang ', # 0xcc<|fim▁hole|>'Cang ', # 0xcd
'Meng ', # 0xce
'Su ', # 0xcf
'Yi ', # 0xd0
'Yuan ', # 0xd1
'Ran ', # 0xd2
'Ling ', # 0xd3
'Tai ', # 0xd4
'Tiao ', # 0xd5
'Di ', # 0xd6
'Miao ', # 0xd7
'Qiong ', # 0xd8
'Li ', # 0xd9
'Yong ', # 0xda
'Ke ', # 0xdb
'Mu ', # 0xdc
'Pei ', # 0xdd
'Bao ', # 0xde
'Gou ', # 0xdf
'Min ', # 0xe0
'Yi ', # 0xe1
'Yi ', # 0xe2
'Ju ', # 0xe3
'Pi ', # 0xe4
'Ruo ', # 0xe5
'Ku ', # 0xe6
'Zhu ', # 0xe7
'Ni ', # 0xe8
'Bo ', # 0xe9
'Bing ', # 0xea
'Shan ', # 0xeb
'Qiu ', # 0xec
'Yao ', # 0xed
'Xian ', # 0xee
'Ben ', # 0xef
'Hong ', # 0xf0
'Ying ', # 0xf1
'Zha ', # 0xf2
'Dong ', # 0xf3
'Ju ', # 0xf4
'Die ', # 0xf5
'Nie ', # 0xf6
'Gan ', # 0xf7
'Hu ', # 0xf8
'Ping ', # 0xf9
'Mei ', # 0xfa
'Fu ', # 0xfb
'Sheng ', # 0xfc
'Gu ', # 0xfd
'Bi ', # 0xfe
'Wei ', # 0xff
)<|fim▁end|> | |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":<|fim▁hole|> from django.core.management import execute_from_command_line
except ImportError:
# The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)<|fim▁end|> | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoServer.settings")
try: |
<|file_name|>toHaveHTML--tests.js<|end_file_name|><|fim▁begin|>const { shallow, mount } = require('enzyme');
const React = require('react');
function Fixture() {
return (
<div id="root">
<span id="child">Test</span>
</div>
);
}
describe('toHaveHTML', () => {
it('works with `shallow` renders', () => {
const wrapper = shallow(<Fixture />);
expect(wrapper.find('#child')).toHaveHTML(
'<span id="child">Test</span>'
);
});
it('works with `mount` renders', () => {
const wrapper = mount(<Fixture />);
expect(wrapper.find('#child')).toHaveHTML(
'<span id="child">Test</span>'
);
});
it('normalizes the quotations used', () => {
const wrapper = shallow(<Fixture />);
expect(wrapper.find('#child')).toHaveHTML(
'<span id="child">Test</span>'
);
expect(wrapper.find('#child')).toHaveHTML(
'<span id=\'child\'>Test</span>'
);
});<|fim▁hole|> });
});<|fim▁end|> |
it('works with with jasmines negation', () => {
const wrapper = shallow(<Fixture />);
expect(wrapper.find('#child')).not.toHaveHTML('foo'); |
<|file_name|>message.py<|end_file_name|><|fim▁begin|>"""Extension argument processing code
"""
__all__ = ['Message', 'NamespaceMap', 'no_default', 'registerNamespaceAlias',
'OPENID_NS', 'BARE_NS', 'OPENID1_NS', 'OPENID2_NS', 'SREG_URI',
'IDENTIFIER_SELECT']
import copy
import warnings
import urllib
from openid import oidutil
from openid import kvform
try:
ElementTree = oidutil.importElementTree()
except ImportError:
# No elementtree found, so give up, but don't fail to import,
# since we have fallbacks.
ElementTree = None
# This doesn't REALLY belong here, but where is better?
IDENTIFIER_SELECT = 'http://specs.openid.net/auth/2.0/identifier_select'
# URI for Simple Registration extension, the only commonly deployed
# OpenID 1.x extension, and so a special case
SREG_URI = 'http://openid.net/sreg/1.0'
# The OpenID 1.X namespace URI
OPENID1_NS = 'http://openid.net/signon/1.0'
THE_OTHER_OPENID1_NS = 'http://openid.net/signon/1.1'
OPENID1_NAMESPACES = OPENID1_NS, THE_OTHER_OPENID1_NS
# The OpenID 2.0 namespace URI
OPENID2_NS = 'http://specs.openid.net/auth/2.0'
# The namespace consisting of pairs with keys that are prefixed with
# "openid." but not in another namespace.
NULL_NAMESPACE = oidutil.Symbol('Null namespace')
# The null namespace, when it is an allowed OpenID namespace
OPENID_NS = oidutil.Symbol('OpenID namespace')
# The top-level namespace, excluding all pairs with keys that start
# with "openid."
BARE_NS = oidutil.Symbol('Bare namespace')
# Limit, in bytes, of identity provider and return_to URLs, including
# response payload. See OpenID 1.1 specification, Appendix D.
OPENID1_URL_LIMIT = 2047
# All OpenID protocol fields. Used to check namespace aliases.
OPENID_PROTOCOL_FIELDS = [
'ns', 'mode', 'error', 'return_to', 'contact', 'reference',
'signed', 'assoc_type', 'session_type', 'dh_modulus', 'dh_gen',
'dh_consumer_public', 'claimed_id', 'identity', 'realm',
'invalidate_handle', 'op_endpoint', 'response_nonce', 'sig',
'assoc_handle', 'trust_root', 'openid',
]
class UndefinedOpenIDNamespace(ValueError):
"""Raised if the generic OpenID namespace is accessed when there
is no OpenID namespace set for this message."""
class InvalidOpenIDNamespace(ValueError):
"""Raised if openid.ns is not a recognized value.
For recognized values, see L{Message.allowed_openid_namespaces}
"""
def __str__(self):
s = "Invalid OpenID Namespace"
if self.args:
s += " %r" % (self.args[0],)
return s
# Sentinel used for Message implementation to indicate that getArg
# should raise an exception instead of returning a default.
no_default = object()
# Global namespace / alias registration map. See
# registerNamespaceAlias.
registered_aliases = {}
class NamespaceAliasRegistrationError(Exception):
"""
Raised when an alias or namespace URI has already been registered.
"""
pass
def registerNamespaceAlias(namespace_uri, alias):
"""
Registers a (namespace URI, alias) mapping in a global namespace
alias map. Raises NamespaceAliasRegistrationError if either the
namespace URI or alias has already been registered with a
different value. This function is required if you want to use a
namespace with an OpenID 1 message.
"""
global registered_aliases
if registered_aliases.get(alias) == namespace_uri:
return
if namespace_uri in registered_aliases.values():
raise NamespaceAliasRegistrationError, \
'Namespace uri %r already registered' % (namespace_uri,)
if alias in registered_aliases:
raise NamespaceAliasRegistrationError, \
'Alias %r already registered' % (alias,)
registered_aliases[alias] = namespace_uri
class Message(object):
"""
In the implementation of this object, None represents the global
namespace as well as a namespace with no key.
@cvar namespaces: A dictionary specifying specific
namespace-URI to alias mappings that should be used when
generating namespace aliases.
@ivar ns_args: two-level dictionary of the values in this message,
grouped by namespace URI. The first level is the namespace
URI.
"""
allowed_openid_namespaces = [OPENID1_NS, THE_OTHER_OPENID1_NS, OPENID2_NS]
def __init__(self, openid_namespace=None):
"""Create an empty Message.
@raises InvalidOpenIDNamespace: if openid_namespace is not in
L{Message.allowed_openid_namespaces}
"""
self.args = {}
self.namespaces = NamespaceMap()
if openid_namespace is None:
self._openid_ns_uri = None
else:
implicit = openid_namespace in OPENID1_NAMESPACES
self.setOpenIDNamespace(openid_namespace, implicit)
def fromPostArgs(cls, args):
"""Construct a Message containing a set of POST arguments.
"""
self = cls()
# Partition into "openid." args and bare args
openid_args = {}
for key, value in args.items():
if isinstance(value, list):
raise TypeError("query dict must have one value for each key, "
"not lists of values. Query is %r" % (args,))
try:
prefix, rest = key.split('.', 1)
except ValueError:
prefix = None
if prefix != 'openid':
self.args[(BARE_NS, key)] = value
else:
openid_args[rest] = value
self._fromOpenIDArgs(openid_args)
return self
fromPostArgs = classmethod(fromPostArgs)
def fromOpenIDArgs(cls, openid_args):
"""Construct a Message from a parsed KVForm message.
@raises InvalidOpenIDNamespace: if openid.ns is not in
L{Message.allowed_openid_namespaces}
"""
self = cls()
self._fromOpenIDArgs(openid_args)
return self
fromOpenIDArgs = classmethod(fromOpenIDArgs)
def _fromOpenIDArgs(self, openid_args):
ns_args = []
# Resolve namespaces
for rest, value in openid_args.iteritems():
try:
ns_alias, ns_key = rest.split('.', 1)
except ValueError:
ns_alias = NULL_NAMESPACE
ns_key = rest
if ns_alias == 'ns':
self.namespaces.addAlias(value, ns_key)
elif ns_alias == NULL_NAMESPACE and ns_key == 'ns':
# null namespace
self.setOpenIDNamespace(value, False)
else:
ns_args.append((ns_alias, ns_key, value))
# Implicitly set an OpenID namespace definition (OpenID 1)
if not self.getOpenIDNamespace():
self.setOpenIDNamespace(OPENID1_NS, True)
# Actually put the pairs into the appropriate namespaces
for (ns_alias, ns_key, value) in ns_args:
ns_uri = self.namespaces.getNamespaceURI(ns_alias)
if ns_uri is None:
# we found a namespaced arg without a namespace URI defined
ns_uri = self._getDefaultNamespace(ns_alias)
if ns_uri is None:
ns_uri = self.getOpenIDNamespace()
ns_key = '%s.%s' % (ns_alias, ns_key)
else:
self.namespaces.addAlias(ns_uri, ns_alias, implicit=True)
self.setArg(ns_uri, ns_key, value)
def _getDefaultNamespace(self, mystery_alias):
"""OpenID 1 compatibility: look for a default namespace URI to
use for this alias."""
global registered_aliases
# Only try to map an alias to a default if it's an
# OpenID 1.x message.
if self.isOpenID1():
return registered_aliases.get(mystery_alias)
else:
return None
def setOpenIDNamespace(self, openid_ns_uri, implicit):
"""Set the OpenID namespace URI used in this message.
@raises InvalidOpenIDNamespace: if the namespace is not in
L{Message.allowed_openid_namespaces}
"""
if openid_ns_uri not in self.allowed_openid_namespaces:
raise InvalidOpenIDNamespace(openid_ns_uri)
self.namespaces.addAlias(openid_ns_uri, NULL_NAMESPACE, implicit)
self._openid_ns_uri = openid_ns_uri
def getOpenIDNamespace(self):
return self._openid_ns_uri
def isOpenID1(self):
return self.getOpenIDNamespace() in OPENID1_NAMESPACES
def isOpenID2(self):
return self.getOpenIDNamespace() == OPENID2_NS
def fromKVForm(cls, kvform_string):
"""Create a Message from a KVForm string"""
return cls.fromOpenIDArgs(kvform.kvToDict(kvform_string))
fromKVForm = classmethod(fromKVForm)
def copy(self):
return copy.deepcopy(self)
def toPostArgs(self):
"""Return all arguments with openid. in front of namespaced arguments.
"""
args = {}
# Add namespace definitions to the output
for ns_uri, alias in self.namespaces.iteritems():
if self.namespaces.isImplicit(ns_uri):
continue
if alias == NULL_NAMESPACE:
ns_key = 'openid.ns'
else:
ns_key = 'openid.ns.' + alias
args[ns_key] = oidutil.toUnicode(ns_uri).encode('UTF-8')
for (ns_uri, ns_key), value in self.args.iteritems():
key = self.getKey(ns_uri, ns_key)
# Ensure the resulting value is an UTF-8 encoded bytestring.
args[key] = oidutil.toUnicode(value).encode('UTF-8')
return args
def toArgs(self):
"""Return all namespaced arguments, failing if any
non-namespaced arguments exist."""
# FIXME - undocumented exception
post_args = self.toPostArgs()
kvargs = {}
for k, v in post_args.iteritems():
if not k.startswith('openid.'):
raise ValueError(
'This message can only be encoded as a POST, because it '
'contains arguments that are not prefixed with "openid."')
else:
kvargs[k[7:]] = v
return kvargs
def toFormMarkup(self, action_url, form_tag_attrs=None,
submit_text=u"Continue"):
"""Generate HTML form markup that contains the values in this
message, to be HTTP POSTed as x-www-form-urlencoded UTF-8.
@param action_url: The URL to which the form will be POSTed
@type action_url: str
@param form_tag_attrs: Dictionary of attributes to be added to
the form tag. 'accept-charset' and 'enctype' have defaults
that can be overridden. If a value is supplied for
'action' or 'method', it will be replaced.
@type form_tag_attrs: {unicode: unicode}
@param submit_text: The text that will appear on the submit
button for this form.
@type submit_text: unicode
@returns: A string containing (X)HTML markup for a form that
encodes the values in this Message object.
@rtype: str or unicode
"""
if ElementTree is None:
raise RuntimeError('This function requires ElementTree.')
assert action_url is not None
form = ElementTree.Element(u'form')
if form_tag_attrs:
for name, attr in form_tag_attrs.iteritems():
form.attrib[name] = attr
form.attrib[u'action'] = oidutil.toUnicode(action_url)
form.attrib[u'method'] = u'post'
form.attrib[u'accept-charset'] = u'UTF-8'
form.attrib[u'enctype'] = u'application/x-www-form-urlencoded'
for name, value in self.toPostArgs().iteritems():
attrs = {u'type': u'hidden',
u'name': oidutil.toUnicode(name),
u'value': oidutil.toUnicode(value)}
form.append(ElementTree.Element(u'input', attrs))
submit = ElementTree.Element(u'input',
{u'type':'submit', u'value':oidutil.toUnicode(submit_text)})
form.append(submit)
return ElementTree.tostring(form, encoding='utf-8')
def toURL(self, base_url):
"""Generate a GET URL with the parameters in this message
attached as query parameters."""
return oidutil.appendArgs(base_url, self.toPostArgs())
def toKVForm(self):
"""Generate a KVForm string that contains the parameters in
this message. This will fail if the message contains arguments
outside of the 'openid.' prefix.
"""
return kvform.dictToKV(self.toArgs())
def toURLEncoded(self):
"""Generate an x-www-urlencoded string"""
args = self.toPostArgs().items()
args.sort()
return urllib.urlencode(args)
def _fixNS(self, namespace):
"""Convert an input value into the internally used values of
this object
@param namespace: The string or constant to convert
@type namespace: str or unicode or BARE_NS or OPENID_NS
"""
if namespace == OPENID_NS:
if self._openid_ns_uri is None:<|fim▁hole|> else:
namespace = self._openid_ns_uri
if namespace != BARE_NS and type(namespace) not in [str, unicode]:
raise TypeError(
"Namespace must be BARE_NS, OPENID_NS or a string. got %r"
% (namespace,))
if namespace != BARE_NS and ':' not in namespace:
fmt = 'OpenID 2.0 namespace identifiers SHOULD be URIs. Got %r'
warnings.warn(fmt % (namespace,), DeprecationWarning)
if namespace == 'sreg':
fmt = 'Using %r instead of "sreg" as namespace'
warnings.warn(fmt % (SREG_URI,), DeprecationWarning,)
return SREG_URI
return namespace
def hasKey(self, namespace, ns_key):
namespace = self._fixNS(namespace)
return (namespace, ns_key) in self.args
def getKey(self, namespace, ns_key):
"""Get the key for a particular namespaced argument"""
namespace = self._fixNS(namespace)
if namespace == BARE_NS:
return ns_key
ns_alias = self.namespaces.getAlias(namespace)
# No alias is defined, so no key can exist
if ns_alias is None:
return None
if ns_alias == NULL_NAMESPACE:
tail = ns_key
else:
tail = '%s.%s' % (ns_alias, ns_key)
return 'openid.' + tail
def getArg(self, namespace, key, default=None):
"""Get a value for a namespaced key.
@param namespace: The namespace in the message for this key
@type namespace: str
@param key: The key to get within this namespace
@type key: str
@param default: The value to use if this key is absent from
this message. Using the special value
openid.message.no_default will result in this method
raising a KeyError instead of returning the default.
@rtype: str or the type of default
@raises KeyError: if default is no_default
@raises UndefinedOpenIDNamespace: if the message has not yet
had an OpenID namespace set
"""
namespace = self._fixNS(namespace)
args_key = (namespace, key)
try:
return self.args[args_key]
except KeyError:
if default is no_default:
raise KeyError((namespace, key))
else:
return default
def getArgs(self, namespace):
"""Get the arguments that are defined for this namespace URI
@returns: mapping from namespaced keys to values
@returntype: dict
"""
namespace = self._fixNS(namespace)
return dict([
(ns_key, value)
for ((pair_ns, ns_key), value)
in self.args.iteritems()
if pair_ns == namespace
])
def updateArgs(self, namespace, updates):
"""Set multiple key/value pairs in one call
@param updates: The values to set
@type updates: {unicode:unicode}
"""
namespace = self._fixNS(namespace)
for k, v in updates.iteritems():
self.setArg(namespace, k, v)
def setArg(self, namespace, key, value):
"""Set a single argument in this namespace"""
assert key is not None
assert value is not None
namespace = self._fixNS(namespace)
self.args[(namespace, key)] = value
if not (namespace is BARE_NS):
self.namespaces.add(namespace)
def delArg(self, namespace, key):
namespace = self._fixNS(namespace)
del self.args[(namespace, key)]
def __repr__(self):
return "<%s.%s %r>" % (self.__class__.__module__,
self.__class__.__name__,
self.args)
def __eq__(self, other):
return self.args == other.args
def __ne__(self, other):
return not (self == other)
def getAliasedArg(self, aliased_key, default=None):
if aliased_key == 'ns':
return self.getOpenIDNamespace()
if aliased_key.startswith('ns.'):
uri = self.namespaces.getNamespaceURI(aliased_key[3:])
if uri is None:
if default == no_default:
raise KeyError
else:
return default
else:
return uri
try:
alias, key = aliased_key.split('.', 1)
except ValueError:
# need more than x values to unpack
ns = None
else:
ns = self.namespaces.getNamespaceURI(alias)
if ns is None:
key = aliased_key
ns = self.getOpenIDNamespace()
return self.getArg(ns, key, default)
class NamespaceMap(object):
"""Maintains a bijective map between namespace uris and aliases.
"""
def __init__(self):
self.alias_to_namespace = {}
self.namespace_to_alias = {}
self.implicit_namespaces = []
def getAlias(self, namespace_uri):
return self.namespace_to_alias.get(namespace_uri)
def getNamespaceURI(self, alias):
return self.alias_to_namespace.get(alias)
def iterNamespaceURIs(self):
"""Return an iterator over the namespace URIs"""
return iter(self.namespace_to_alias)
def iterAliases(self):
"""Return an iterator over the aliases"""
return iter(self.alias_to_namespace)
def iteritems(self):
"""Iterate over the mapping
@returns: iterator of (namespace_uri, alias)
"""
return self.namespace_to_alias.iteritems()
def addAlias(self, namespace_uri, desired_alias, implicit=False):
"""Add an alias from this namespace URI to the desired alias
"""
# Check that desired_alias is not an openid protocol field as
# per the spec.
assert desired_alias not in OPENID_PROTOCOL_FIELDS, \
"%r is not an allowed namespace alias" % (desired_alias,)
# Check that desired_alias does not contain a period as per
# the spec.
if type(desired_alias) in [str, unicode]:
assert '.' not in desired_alias, \
"%r must not contain a dot" % (desired_alias,)
# Check that there is not a namespace already defined for
# the desired alias
current_namespace_uri = self.alias_to_namespace.get(desired_alias)
if (current_namespace_uri is not None
and current_namespace_uri != namespace_uri):
fmt = ('Cannot map %r to alias %r. '
'%r is already mapped to alias %r')
msg = fmt % (
namespace_uri,
desired_alias,
current_namespace_uri,
desired_alias)
raise KeyError(msg)
# Check that there is not already a (different) alias for
# this namespace URI
alias = self.namespace_to_alias.get(namespace_uri)
if alias is not None and alias != desired_alias:
fmt = ('Cannot map %r to alias %r. '
'It is already mapped to alias %r')
raise KeyError(fmt % (namespace_uri, desired_alias, alias))
assert (desired_alias == NULL_NAMESPACE or
type(desired_alias) in [str, unicode]), repr(desired_alias)
assert namespace_uri not in self.implicit_namespaces
self.alias_to_namespace[desired_alias] = namespace_uri
self.namespace_to_alias[namespace_uri] = desired_alias
if implicit:
self.implicit_namespaces.append(namespace_uri)
return desired_alias
def add(self, namespace_uri):
"""Add this namespace URI to the mapping, without caring what
alias it ends up with"""
# See if this namespace is already mapped to an alias
alias = self.namespace_to_alias.get(namespace_uri)
if alias is not None:
return alias
# Fall back to generating a numerical alias
i = 0
while True:
alias = 'ext' + str(i)
try:
self.addAlias(namespace_uri, alias)
except KeyError:
i += 1
else:
return alias
assert False, "Not reached"
def isDefined(self, namespace_uri):
return namespace_uri in self.namespace_to_alias
def __contains__(self, namespace_uri):
return self.isDefined(namespace_uri)
def isImplicit(self, namespace_uri):
return namespace_uri in self.implicit_namespaces<|fim▁end|> | raise UndefinedOpenIDNamespace('OpenID namespace not set') |
<|file_name|>loaders.py<|end_file_name|><|fim▁begin|>"""
Test cases for the template loaders
Note: This test requires setuptools!
"""
from django.conf import settings
if __name__ == '__main__':
settings.configure()
import unittest
import sys
import pkg_resources
import imp
import StringIO
import os.path
from django.template import TemplateDoesNotExist
from django.template.loaders.eggs import load_template_source as lts_egg
# Mock classes and objects for pkg_resources functions.
class MockProvider(pkg_resources.NullProvider):
def __init__(self, module):
pkg_resources.NullProvider.__init__(self, module)
self.module = module
def _has(self, path):
return path in self.module._resources
def _isdir(self,path):
return False
def get_resource_stream(self, manager, resource_name):
return self.module._resources[resource_name]
def _get(self, path):
return self.module._resources[path].read()
class MockLoader(object):
pass
def create_egg(name, resources):
"""
Creates a mock egg with a list of resources.
name: The name of the module.
resources: A dictionary of resources. Keys are the names and values the data.
"""
egg = imp.new_module(name)
egg.__loader__ = MockLoader()
egg._resources = resources
sys.modules[name] = egg
class EggLoader(unittest.TestCase):
def setUp(self):
pkg_resources._provider_factories[MockLoader] = MockProvider
self.empty_egg = create_egg("egg_empty", {})
self.egg_1 = create_egg("egg_1", {
os.path.normcase('templates/y.html') : StringIO.StringIO("y"),
os.path.normcase('templates/x.txt') : StringIO.StringIO("x"),
})
self._old_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = []
def tearDown(self):
settings.INSTALLED_APPS = self._old_installed_apps
def test_empty(self):
"Loading any template on an empty egg should fail"
settings.INSTALLED_APPS = ['egg_empty']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_non_existing(self):
"Template loading fails if the template is not in the egg"
settings.INSTALLED_APPS = ['egg_1']
self.assertRaises(TemplateDoesNotExist, lts_egg, "not-existing.html")
def test_existing(self):
"A template can be loaded from an egg"
settings.INSTALLED_APPS = ['egg_1']
contents, template_name = lts_egg("y.html")
self.assertEqual(contents, "y")
self.assertEqual(template_name, "egg:egg_1:templates/y.html")
def test_not_installed(self):<|fim▁hole|>
if __name__ == "__main__":
unittest.main()<|fim▁end|> | "Loading an existent template from an egg not included in INSTALLED_APPS should fail"
settings.INSTALLED_APPS = []
self.assertRaises(TemplateDoesNotExist, lts_egg, "y.html") |
<|file_name|>test_oplog_manager.py<|end_file_name|><|fim▁begin|># Copyright 2013-2014 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test oplog manager methods
"""
import itertools
import re
import sys
import time
import bson
import gridfs
import pymongo
sys.path[0:0] = [""]
from mongo_connector.doc_managers.doc_manager_simulator import DocManager
from mongo_connector.locking_dict import LockingDict
from mongo_connector.namespace_config import NamespaceConfig
from mongo_connector.oplog_manager import OplogThread
from mongo_connector.test_utils import (assert_soon,
close_client,
ReplicaSetSingle)
from mongo_connector.util import bson_ts_to_long
from tests import unittest
class TestOplogManager(unittest.TestCase):
"""Defines all the testing methods, as well as a method that sets up the
cluster
"""
def setUp(self):
self.repl_set = ReplicaSetSingle().start()
self.primary_conn = self.repl_set.client()
self.oplog_coll = self.primary_conn.local['oplog.rs']
self.opman = OplogThread(
primary_client=self.primary_conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(),
namespace_config=NamespaceConfig(
namespace_options={
'test.*': True,
'gridfs.*': {'gridfs': True}
}
),
)
def tearDown(self):
try:
self.opman.join()
except RuntimeError:
pass # OplogThread may not have been started
self.primary_conn.drop_database("test")
close_client(self.primary_conn)
self.repl_set.stop()
def test_get_oplog_cursor(self):
"""Test the get_oplog_cursor method"""
# timestamp is None - all oplog entries excluding no-ops are returned.
cursor = self.opman.get_oplog_cursor(None)
self.assertEqual(cursor.count(),
self.primary_conn["local"]["oplog.rs"].find(
{'op': {'$ne': 'n'}}).count())
# earliest entry is the only one at/after timestamp
doc = {"ts": bson.Timestamp(1000, 0), "i": 1}
self.primary_conn["test"]["test"].insert_one(doc)
latest_timestamp = self.opman.get_last_oplog_timestamp()
cursor = self.opman.get_oplog_cursor(latest_timestamp)
self.assertNotEqual(cursor, None)
self.assertEqual(cursor.count(), 1)
next_entry_id = next(cursor)['o']['_id']
retrieved = self.primary_conn.test.test.find_one(next_entry_id)
self.assertEqual(retrieved, doc)
# many entries before and after timestamp
self.primary_conn["test"]["test"].insert_many(
[{"i": i} for i in range(2, 1002)])
oplog_cursor = self.oplog_coll.find(
{'op': {'$ne': 'n'},
'ns': {'$not': re.compile(r'\.(system|\$cmd)')}},
sort=[("ts", pymongo.ASCENDING)]
)
# initial insert + 1000 more inserts
self.assertEqual(oplog_cursor.count(), 1 + 1000)
pivot = oplog_cursor.skip(400).limit(-1)[0]
goc_cursor = self.opman.get_oplog_cursor(pivot["ts"])
self.assertEqual(goc_cursor.count(), 1 + 1000 - 400)
def test_get_last_oplog_timestamp(self):
"""Test the get_last_oplog_timestamp method"""
# "empty" the oplog
self.opman.oplog = self.primary_conn["test"]["emptycollection"]
self.assertEqual(self.opman.get_last_oplog_timestamp(), None)
# Test non-empty oplog
self.opman.oplog = self.primary_conn["local"]["oplog.rs"]
for i in range(1000):
self.primary_conn["test"]["test"].insert_one({
"i": i + 500
})
oplog = self.primary_conn["local"]["oplog.rs"]
oplog = oplog.find().sort("$natural", pymongo.DESCENDING).limit(-1)[0]
self.assertEqual(self.opman.get_last_oplog_timestamp(),
oplog["ts"])
def test_dump_collection(self):
"""Test the dump_collection method
Cases:
1. empty oplog
2. non-empty oplog, with gridfs collections
3. non-empty oplog, specified a namespace-set, none of the oplog
entries are for collections in the namespace-set
"""
# Test with empty oplog
self.opman.oplog = self.primary_conn["test"]["emptycollection"]
last_ts = self.opman.dump_collection()
self.assertEqual(last_ts, None)
# Test with non-empty oplog with gridfs collections
self.opman.oplog = self.primary_conn["local"]["oplog.rs"]
# Insert 10 gridfs files
for i in range(10):
fs = gridfs.GridFS(self.primary_conn["gridfs"],
collection="test" + str(i))
fs.put(b"hello world")
# Insert 1000 documents
for i in range(1000):
self.primary_conn["test"]["test"].insert_one({
"i": i + 500
})
last_ts = self.opman.get_last_oplog_timestamp()
self.assertEqual(last_ts, self.opman.dump_collection())
self.assertEqual(len(self.opman.doc_managers[0]._search()), 1010)
# Case 3
# 1MB oplog so that we can rollover quickly
repl_set = ReplicaSetSingle(oplogSize=1).start()
conn = repl_set.client()
opman = OplogThread(
primary_client=conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(),
namespace_config=NamespaceConfig(namespace_set=["test.test"]),
)
# Insert a document into an included collection
conn["test"]["test"].insert_one({"test": 1})
# Cause the oplog to rollover on a non-included collection
while conn["local"]["oplog.rs"].find_one({"ns": "test.test"}):
conn["test"]["ignored"].insert_many(
[{"test": "1" * 1024} for _ in range(1024)])
last_ts = opman.get_last_oplog_timestamp()
self.assertEqual(last_ts, opman.dump_collection())
self.assertEqual(len(opman.doc_managers[0]._search()), 1)
conn.close()
repl_set.stop()
def test_skipped_oplog_entry_updates_checkpoint(self):
repl_set = ReplicaSetSingle().start()
conn = repl_set.client()
opman = OplogThread(<|fim▁hole|> namespace_config=NamespaceConfig(namespace_set=["test.test"]),
)
opman.start()
# Insert a document into an included collection
conn["test"]["test"].insert_one({"test": 1})
last_ts = opman.get_last_oplog_timestamp()
assert_soon(lambda: last_ts == opman.checkpoint,
"OplogThread never updated checkpoint to non-skipped "
"entry.")
self.assertEqual(len(opman.doc_managers[0]._search()), 1)
# Make sure that the oplog thread updates its checkpoint on every
# oplog entry.
conn["test"]["ignored"].insert_one({"test": 1})
last_ts = opman.get_last_oplog_timestamp()
assert_soon(lambda: last_ts == opman.checkpoint,
"OplogThread never updated checkpoint to skipped entry.")
opman.join()
conn.close()
repl_set.stop()
def test_dump_collection_with_error(self):
"""Test the dump_collection method with invalid documents.
Cases:
1. non-empty oplog, continue_on_error=True, invalid documents
"""
# non-empty oplog, continue_on_error=True, invalid documents
self.opman.continue_on_error = True
self.opman.oplog = self.primary_conn["local"]["oplog.rs"]
docs = [{'a': i} for i in range(100)]
for i in range(50, 60):
docs[i]['_upsert_exception'] = True
self.primary_conn['test']['test'].insert_many(docs)
last_ts = self.opman.get_last_oplog_timestamp()
self.assertEqual(last_ts, self.opman.dump_collection())
docs = self.opman.doc_managers[0]._search()
docs.sort(key=lambda doc: doc['a'])
self.assertEqual(len(docs), 90)
expected_a = itertools.chain(range(0, 50), range(60, 100))
for doc, correct_a in zip(docs, expected_a):
self.assertEqual(doc['a'], correct_a)
def test_dump_collection_cancel(self):
"""Test that dump_collection returns None when cancelled."""
self.primary_conn["test"]["test"].insert_one({"test": "1"})
# Pretend that the OplogThead was cancelled
self.opman.running = False
self.assertIsNone(self.opman.dump_collection())
def test_init_cursor(self):
"""Test the init_cursor method
Cases:
1. no last checkpoint, no collection dump
2. no last checkpoint, collection dump ok and stuff to dump
3. no last checkpoint, nothing to dump, stuff in oplog
4. no last checkpoint, nothing to dump, nothing in oplog
5. no last checkpoint, no collection dump, stuff in oplog
6. last checkpoint exists
7. last checkpoint is behind
"""
# N.B. these sub-cases build off of each other and cannot be re-ordered
# without side-effects
# No last checkpoint, no collection dump, nothing in oplog
# "change oplog collection" to put nothing in oplog
self.opman.oplog = self.primary_conn["test"]["emptycollection"]
self.opman.collection_dump = False
self.assertTrue(all(doc['op'] == 'n'
for doc in self.opman.init_cursor()[0]))
self.assertEqual(self.opman.checkpoint, None)
# No last checkpoint, empty collections, nothing in oplog
self.opman.collection_dump = True
cursor, cursor_empty = self.opman.init_cursor()
self.assertEqual(cursor, None)
self.assertTrue(cursor_empty)
self.assertEqual(self.opman.checkpoint, None)
# No last checkpoint, empty collections, something in oplog
self.opman.oplog = self.primary_conn['local']['oplog.rs']
collection = self.primary_conn["test"]["test"]
collection.insert_one({"i": 1})
collection.delete_one({"i": 1})
time.sleep(3)
last_ts = self.opman.get_last_oplog_timestamp()
cursor, cursor_empty = self.opman.init_cursor()
self.assertFalse(cursor_empty)
self.assertEqual(self.opman.checkpoint, last_ts)
self.assertEqual(self.opman.read_last_checkpoint(), last_ts)
# No last checkpoint, no collection dump, something in oplog
# If collection dump is false the checkpoint should not be set
self.opman.checkpoint = None
self.opman.oplog_progress = LockingDict()
self.opman.collection_dump = False
collection.insert_one({"i": 2})
cursor, cursor_empty = self.opman.init_cursor()
for doc in cursor:
last_doc = doc
self.assertEqual(last_doc['o']['i'], 2)
self.assertIsNone(self.opman.checkpoint)
# Last checkpoint exists, no collection dump, something in oplog
collection.insert_many([{"i": i + 500} for i in range(1000)])
entry = list(
self.primary_conn["local"]["oplog.rs"].find(skip=200, limit=-2))
self.opman.update_checkpoint(entry[0]["ts"])
cursor, cursor_empty = self.opman.init_cursor()
self.assertEqual(next(cursor)["ts"], entry[1]["ts"])
self.assertEqual(self.opman.checkpoint, entry[0]["ts"])
self.assertEqual(self.opman.read_last_checkpoint(), entry[0]["ts"])
# Last checkpoint is behind
self.opman.update_checkpoint(bson.Timestamp(1, 0))
cursor, cursor_empty = self.opman.init_cursor()
self.assertTrue(cursor_empty)
self.assertEqual(cursor, None)
self.assertEqual(self.opman.checkpoint, bson.Timestamp(1, 0))
def test_namespace_mapping(self):
"""Test mapping of namespaces
Cases:
upsert/delete/update of documents:
1. in namespace set, mapping provided
2. outside of namespace set, mapping provided
"""
source_ns = ["test.test1", "test.test2"]
phony_ns = ["test.phony1", "test.phony2"]
dest_mapping = {"test.test1": "test.test1_dest",
"test.test2": "test.test2_dest"}
self.opman.namespace_config = NamespaceConfig(
namespace_set=source_ns, namespace_options=dest_mapping)
docman = self.opman.doc_managers[0]
# start replicating
self.opman.start()
base_doc = {"_id": 1, "name": "superman"}
# doc in namespace set
for ns in source_ns:
db, coll = ns.split(".", 1)
# test insert
self.primary_conn[db][coll].insert_one(base_doc)
assert_soon(lambda: len(docman._search()) == 1)
self.assertEqual(docman._search()[0]["ns"], dest_mapping[ns])
bad = [d for d in docman._search() if d["ns"] == ns]
self.assertEqual(len(bad), 0)
# test update
self.primary_conn[db][coll].update_one(
{"_id": 1},
{"$set": {"weakness": "kryptonite"}}
)
def update_complete():
docs = docman._search()
for d in docs:
if d.get("weakness") == "kryptonite":
return True
return False
assert_soon(update_complete)
self.assertEqual(docman._search()[0]["ns"], dest_mapping[ns])
bad = [d for d in docman._search() if d["ns"] == ns]
self.assertEqual(len(bad), 0)
# test delete
self.primary_conn[db][coll].delete_one({"_id": 1})
assert_soon(lambda: len(docman._search()) == 0)
bad = [d for d in docman._search()
if d["ns"] == dest_mapping[ns]]
self.assertEqual(len(bad), 0)
# cleanup
self.primary_conn[db][coll].delete_many({})
self.opman.doc_managers[0]._delete()
# doc not in namespace set
for ns in phony_ns:
db, coll = ns.split(".", 1)
# test insert
self.primary_conn[db][coll].insert_one(base_doc)
time.sleep(1)
self.assertEqual(len(docman._search()), 0)
# test update
self.primary_conn[db][coll].update_one(
{"_id": 1},
{"$set": {"weakness": "kryptonite"}}
)
time.sleep(1)
self.assertEqual(len(docman._search()), 0)
def test_many_targets(self):
"""Test that one OplogThread is capable of replicating to more than
one target.
"""
doc_managers = [DocManager(), DocManager(), DocManager()]
self.opman.doc_managers = doc_managers
# start replicating
self.opman.start()
self.primary_conn["test"]["test"].insert_one({
"name": "kermit",
"color": "green"
})
self.primary_conn["test"]["test"].insert_one({
"name": "elmo",
"color": "firetruck red"
})
assert_soon(
lambda: sum(len(d._search()) for d in doc_managers) == 6,
"OplogThread should be able to replicate to multiple targets"
)
self.primary_conn["test"]["test"].delete_one({"name": "elmo"})
assert_soon(
lambda: sum(len(d._search()) for d in doc_managers) == 3,
"OplogThread should be able to replicate to multiple targets"
)
for d in doc_managers:
self.assertEqual(d._search()[0]["name"], "kermit")
def test_upgrade_oplog_progress(self):
first_oplog_ts = self.opman.oplog.find_one()['ts']
# Old format oplog progress file:
progress = {str(self.opman.oplog): bson_ts_to_long(first_oplog_ts)}
# Set up oplog managers to use the old format.
oplog_progress = LockingDict()
oplog_progress.dict = progress
self.opman.oplog_progress = oplog_progress
# Cause the oplog managers to update their checkpoints.
self.opman.update_checkpoint(first_oplog_ts)
# New format should be in place now.
new_format = {self.opman.replset_name: first_oplog_ts}
self.assertEqual(
new_format,
self.opman.oplog_progress.get_dict()
)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | primary_client=conn,
doc_managers=(DocManager(),),
oplog_progress_dict=LockingDict(), |
<|file_name|>managed_rom_archive_tests.py<|end_file_name|><|fim▁begin|>import json
from mockito import *
import os
import shutil
import tempfile
import unittest
from ice.history import ManagedROMArchive
class ManagedROMArchiveTests(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.temppath = os.path.join(self.tempdir, "tempfile")
self.mock_user = mock()
self.mock_user.user_id = 1234
def tearDown(self):
shutil.rmtree(self.tempdir)
def test_previous_managed_ids_returns_none_for_missing_file(self):
missing_path = os.path.join("some", "stupid", "path")
self.assertFalse(os.path.exists(missing_path))
archive = ManagedROMArchive(missing_path)
self.assertIsNone(archive.previous_managed_ids(self.mock_user))
def test_previous_managed_ids_raises_exception_for_malformed_json(self):
with open(self.temppath, "w+") as f:
f.write("notrealjson")
with self.assertRaises(ValueError):
archive = ManagedROMArchive(self.temppath)
def test_previous_managed_ids_returns_empty_list_for_missing_user(self):
data = {
"1337": []
}
with open(self.temppath, "w+") as f:
f.write(json.dumps(data))
archive = ManagedROMArchive(self.temppath)
self.assertEquals(archive.previous_managed_ids(self.mock_user), [])
def test_previous_managed_ids_returns_list_from_json(self):
data = {
"1234": [
"1234567890",
"0987654321",
]
}
with open(self.temppath, "w+") as f:
f.write(json.dumps(data))<|fim▁hole|> self.assertEquals(archive.previous_managed_ids(self.mock_user), ["1234567890","0987654321"])
def test_set_managed_ids_creates_new_file_if_needed(self):
self.assertFalse(os.path.exists(self.temppath))
archive = ManagedROMArchive(self.temppath)
archive.set_managed_ids(self.mock_user, ["1234567890"])
self.assertTrue(os.path.exists(self.temppath))
def test_previous_managed_ids_returns_new_value_after_set_managed_ids(self):
archive = ManagedROMArchive(self.temppath)
new_ids = ["1234567890"]
self.assertNotEqual(archive.previous_managed_ids(self.mock_user), new_ids)
archive.set_managed_ids(self.mock_user, ["1234567890"])
self.assertEqual(archive.previous_managed_ids(self.mock_user), new_ids)
def test_creating_new_archive_after_set_managed_ids_uses_new_ids(self):
archive = ManagedROMArchive(self.temppath)
new_ids = ["1234567890"]
self.assertNotEqual(archive.previous_managed_ids(self.mock_user), new_ids)
archive.set_managed_ids(self.mock_user, ["1234567890"])
new_archive = ManagedROMArchive(self.temppath)
self.assertEqual(new_archive.previous_managed_ids(self.mock_user), new_ids)<|fim▁end|> | archive = ManagedROMArchive(self.temppath)
|
<|file_name|>2d4882d39dbb_add_graphql_acl_to_users.py<|end_file_name|><|fim▁begin|>"""add graphql ACL to users
Revision ID: 2d4882d39dbb
Revises: c4d0e9ec46a9
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2d4882d39dbb'
down_revision = 'dc2848563b53'
POLICY_NAME = 'wazo_default_user_policy'
ACL_TEMPLATES = ['dird.graphql.me']
policy_table = sa.sql.table(
'auth_policy', sa.Column('uuid', sa.String(38)), sa.Column('name', sa.String(80))
)
acl_template_table = sa.sql.table(
'auth_acl_template', sa.Column('id', sa.Integer), sa.Column('template', sa.Text)
)
policy_template = sa.sql.table(
'auth_policy_template',
sa.Column('policy_uuid', sa.String(38)),
sa.Column('template_id', sa.Integer),
)
def _find_acl_template(conn, acl_template):
query = (
sa.sql.select([acl_template_table.c.id])
.where(acl_template_table.c.template == acl_template)
.limit(1)
)<|fim▁hole|>
def _find_acl_templates(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if acl_template_id:
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_policy_uuid(conn, policy_name):
policy_query = sa.sql.select([policy_table.c.uuid]).where(
policy_table.c.name == policy_name
)
for policy in conn.execute(policy_query).fetchall():
return policy[0]
def _insert_acl_template(conn, acl_templates):
acl_template_ids = []
for acl_template in acl_templates:
acl_template_id = _find_acl_template(conn, acl_template)
if not acl_template_id:
query = (
acl_template_table.insert()
.returning(acl_template_table.c.id)
.values(template=acl_template)
)
acl_template_id = conn.execute(query).scalar()
acl_template_ids.append(acl_template_id)
return acl_template_ids
def _get_acl_template_ids(conn, policy_uuid):
query = sa.sql.select([policy_template.c.template_id]).where(
policy_template.c.policy_uuid == policy_uuid
)
return [acl_template_id for (acl_template_id,) in conn.execute(query).fetchall()]
def upgrade():
conn = op.get_bind()
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
acl_template_ids = _insert_acl_template(conn, ACL_TEMPLATES)
acl_template_ids_already_associated = _get_acl_template_ids(conn, policy_uuid)
for template_id in set(acl_template_ids) - set(acl_template_ids_already_associated):
query = policy_template.insert().values(
policy_uuid=policy_uuid, template_id=template_id
)
conn.execute(query)
def downgrade():
conn = op.get_bind()
acl_template_ids = _find_acl_templates(conn, ACL_TEMPLATES)
if not acl_template_ids:
return
policy_uuid = _get_policy_uuid(conn, POLICY_NAME)
if not policy_uuid:
return
delete_query = policy_template.delete().where(
sa.sql.and_(
policy_template.c.policy_uuid == policy_uuid,
policy_template.c.template_id.in_(acl_template_ids),
)
)
op.execute(delete_query)<|fim▁end|> | return conn.execute(query).scalar() |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|># Copyright (c) 2008, Aldo Cortesi. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import division
try:
import tracemalloc
except ImportError:
tracemalloc = None
from libqtile.dgroups import DGroups
from xcffib.xproto import EventMask, WindowError, AccessError, DrawableError
import logging
import os
import pickle
import shlex
import signal
import sys
import traceback
import xcffib
import xcffib.xinerama
import xcffib.xproto
import six
from . import asyncio
from .config import Drag, Click, Screen, Match, Rule
from .group import _Group
from .log_utils import logger
from .state import QtileState
from .utils import QtileError, get_cache_dir
from .widget.base import _Widget
from . import command
from . import hook
from . import utils
from . import window
from . import xcbq
if sys.version_info >= (3, 3):
def _import_module(module_name, dir_path):
import importlib<|fim▁hole|>else:
def _import_module(module_name, dir_path):
import imp
fp = None
try:
fp, pathname, description = imp.find_module(module_name, [dir_path])
module = imp.load_module(module_name, fp, pathname, description)
finally:
if fp:
fp.close()
return module
class Qtile(command.CommandObject):
"""
This object is the __root__ of the command graph.
"""
def __init__(self, config, displayName=None, fname=None, no_spawn=False, state=None):
self.no_spawn = no_spawn
self._eventloop = None
self._finalize = False
if not displayName:
displayName = os.environ.get("DISPLAY")
if not displayName:
raise QtileError("No DISPLAY set.")
if not fname:
# Dots might appear in the host part of the display name
# during remote X sessions. Let's strip the host part first.
displayNum = displayName.partition(":")[2]
if "." not in displayNum:
displayName += ".0"
fname = command.find_sockfile(displayName)
self.conn = xcbq.Connection(displayName)
self.config = config
self.fname = fname
hook.init(self)
self.windowMap = {}
self.widgetMap = {}
self.groupMap = {}
self.groups = []
self.keyMap = {}
# Find the modifier mask for the numlock key, if there is one:
nc = self.conn.keysym_to_keycode(xcbq.keysyms["Num_Lock"])
self.numlockMask = xcbq.ModMasks[self.conn.get_modifier(nc)]
self.validMask = ~(self.numlockMask | xcbq.ModMasks["lock"])
# Because we only do Xinerama multi-screening,
# we can assume that the first
# screen's root is _the_ root.
self.root = self.conn.default_screen.root
self.root.set_attribute(
eventmask=(
EventMask.StructureNotify |
EventMask.SubstructureNotify |
EventMask.SubstructureRedirect |
EventMask.EnterWindow |
EventMask.LeaveWindow
)
)
self.root.set_property(
'_NET_SUPPORTED',
[self.conn.atoms[x] for x in xcbq.SUPPORTED_ATOMS]
)
self.supporting_wm_check_window = self.conn.create_window(-1, -1, 1, 1)
self.root.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
# setup the default cursor
self.root.set_cursor('left_ptr')
wmname = getattr(self.config, "wmname", "qtile")
self.supporting_wm_check_window.set_property('_NET_WM_NAME', wmname)
self.supporting_wm_check_window.set_property(
'_NET_SUPPORTING_WM_CHECK',
self.supporting_wm_check_window.wid
)
if config.main:
config.main(self)
self.dgroups = None
if self.config.groups:
key_binder = None
if hasattr(self.config, 'dgroups_key_binder'):
key_binder = self.config.dgroups_key_binder
self.dgroups = DGroups(self, self.config.groups, key_binder)
if hasattr(config, "widget_defaults") and config.widget_defaults:
_Widget.global_defaults = config.widget_defaults
else:
_Widget.global_defaults = {}
for i in self.groups:
self.groupMap[i.name] = i
self.setup_eventloop()
self.server = command._Server(self.fname, self, config, self._eventloop)
self.currentScreen = None
self.screens = []
self._process_screens()
self.currentScreen = self.screens[0]
self._drag = None
self.ignoreEvents = set([
xcffib.xproto.KeyReleaseEvent,
xcffib.xproto.ReparentNotifyEvent,
xcffib.xproto.CreateNotifyEvent,
# DWM handles this to help "broken focusing windows".
xcffib.xproto.MapNotifyEvent,
xcffib.xproto.LeaveNotifyEvent,
xcffib.xproto.FocusOutEvent,
xcffib.xproto.FocusInEvent,
xcffib.xproto.NoExposureEvent
])
self.conn.flush()
self.conn.xsync()
self._xpoll()
# Map and Grab keys
for key in self.config.keys:
self.mapKey(key)
# It fixes problems with focus when clicking windows of some specific clients like xterm
def noop(qtile):
pass
self.config.mouse += (Click([], "Button1", command.lazy.function(noop), focus="after"),)
self.mouseMap = {}
for i in self.config.mouse:
if self.mouseMap.get(i.button_code) is None:
self.mouseMap[i.button_code] = []
self.mouseMap[i.button_code].append(i)
self.grabMouse()
# no_spawn is set when we are restarting; we only want to run the
# startup hook once.
if not no_spawn:
hook.fire("startup_once")
hook.fire("startup")
self.scan()
self.update_net_desktops()
hook.subscribe.setgroup(self.update_net_desktops)
if state:
st = pickle.load(six.BytesIO(state.encode()))
try:
st.apply(self)
except:
logger.exception("failed restoring state")
self.selection = {
"PRIMARY": {"owner": None, "selection": ""},
"CLIPBOARD": {"owner": None, "selection": ""}
}
self.setup_selection()
def setup_selection(self):
PRIMARY = self.conn.atoms["PRIMARY"]
CLIPBOARD = self.conn.atoms["CLIPBOARD"]
self.selection_window = self.conn.create_window(-1, -1, 1, 1)
self.selection_window.set_attribute(eventmask=EventMask.PropertyChange)
self.conn.xfixes.select_selection_input(self.selection_window,
"PRIMARY")
self.conn.xfixes.select_selection_input(self.selection_window,
"CLIPBOARD")
r = self.conn.conn.core.GetSelectionOwner(PRIMARY).reply()
self.selection["PRIMARY"]["owner"] = r.owner
r = self.conn.conn.core.GetSelectionOwner(CLIPBOARD).reply()
self.selection["CLIPBOARD"]["owner"] = r.owner
# ask for selection on starup
self.convert_selection(PRIMARY)
self.convert_selection(CLIPBOARD)
def setup_eventloop(self):
self._eventloop = asyncio.new_event_loop()
self._eventloop.add_signal_handler(signal.SIGINT, self.stop)
self._eventloop.add_signal_handler(signal.SIGTERM, self.stop)
self._eventloop.set_exception_handler(
lambda x, y: logger.exception("Got an exception in poll loop")
)
logger.info('Adding io watch')
fd = self.conn.conn.get_file_descriptor()
self._eventloop.add_reader(fd, self._xpoll)
self.setup_python_dbus()
def setup_python_dbus(self):
# This is a little strange. python-dbus internally depends on gobject,
# so gobject's threads need to be running, and a gobject "main loop
# thread" needs to be spawned, but we try to let it only interact with
# us via calls to asyncio's call_soon_threadsafe.
try:
# We import dbus here to thrown an ImportError if it isn't
# available. Since the only reason we're running this thread is
# because of dbus, if dbus isn't around there's no need to run
# this thread.
import dbus # noqa
from gi.repository import GLib
def gobject_thread():
ctx = GLib.main_context_default()
while not self._finalize:
try:
ctx.iteration(True)
except Exception:
logger.exception("got exception from gobject")
self._glib_loop = self.run_in_executor(gobject_thread)
except ImportError:
logger.warning("importing dbus/gobject failed, dbus will not work.")
self._glib_loop = None
def finalize(self):
self._finalize = True
self._eventloop.remove_signal_handler(signal.SIGINT)
self._eventloop.remove_signal_handler(signal.SIGTERM)
self._eventloop.set_exception_handler(None)
try:
from gi.repository import GLib
GLib.idle_add(lambda: None)
self._eventloop.run_until_complete(self._glib_loop)
except ImportError:
pass
try:
for w in self.widgetMap.values():
w.finalize()
for l in self.config.layouts:
l.finalize()
for screen in self.screens:
for bar in [screen.top, screen.bottom, screen.left, screen.right]:
if bar is not None:
bar.finalize()
logger.info('Removing io watch')
fd = self.conn.conn.get_file_descriptor()
self._eventloop.remove_reader(fd)
self.conn.finalize()
self.server.close()
except:
logger.exception('exception during finalize')
finally:
self._eventloop.close()
self._eventloop = None
def _process_fake_screens(self):
"""
Since Xephyr, Xnest don't really support offset screens,
we'll fake it here for testing, (or if you want to partition
a physical monitor into separate screens)
"""
for i, s in enumerate(self.config.fake_screens):
# should have x,y, width and height set
s._configure(self, i, s.x, s.y, s.width, s.height, self.groups[i])
if not self.currentScreen:
self.currentScreen = s
self.screens.append(s)
def _process_screens(self):
if hasattr(self.config, 'fake_screens'):
self._process_fake_screens()
return
# What's going on here is a little funny. What we really want is only
# screens that don't overlap here; overlapping screens should see the
# same parts of the root window (i.e. for people doing xrandr
# --same-as). However, the order that X gives us pseudo screens in is
# important, because it indicates what people have chosen via xrandr
# --primary or whatever. So we need to alias screens that should be
# aliased, but preserve order as well. See #383.
xywh = {}
screenpos = []
for s in self.conn.pseudoscreens:
pos = (s.x, s.y)
(w, h) = xywh.get(pos, (0, 0))
if pos not in xywh:
screenpos.append(pos)
xywh[pos] = (max(w, s.width), max(h, s.height))
for i, (x, y) in enumerate(screenpos):
(w, h) = xywh[(x, y)]
if i + 1 > len(self.config.screens):
scr = Screen()
else:
scr = self.config.screens[i]
if not self.currentScreen:
self.currentScreen = scr
scr._configure(
self,
i,
x,
y,
w,
h,
self.groups[i],
)
self.screens.append(scr)
if not self.screens:
if self.config.screens:
s = self.config.screens[0]
else:
s = Screen()
self.currentScreen = s
s._configure(
self,
0, 0, 0,
self.conn.default_screen.width_in_pixels,
self.conn.default_screen.height_in_pixels,
self.groups[0],
)
self.screens.append(s)
def mapKey(self, key):
self.keyMap[(key.keysym, key.modmask & self.validMask)] = key
code = self.conn.keysym_to_keycode(key.keysym)
self.root.grab_key(
code,
key.modmask,
True,
xcffib.xproto.GrabMode.Async,
xcffib.xproto.GrabMode.Async,
)
if self.numlockMask:
self.root.grab_key(
code,
key.modmask | self.numlockMask,
True,
xcffib.xproto.GrabMode.Async,
xcffib.xproto.GrabMode.Async,
)
self.root.grab_key(
code,
key.modmask | self.numlockMask | xcbq.ModMasks["lock"],
True,
xcffib.xproto.GrabMode.Async,
xcffib.xproto.GrabMode.Async,
)
def unmapKey(self, key):
key_index = (key.keysym, key.modmask & self.validMask)
if key_index not in self.keyMap:
return
code = self.conn.keysym_to_keycode(key.keysym)
self.root.ungrab_key(code, key.modmask)
if self.numlockMask:
self.root.ungrab_key(code, key.modmask | self.numlockMask)
self.root.ungrab_key(
code,
key.modmask | self.numlockMask | xcbq.ModMasks["lock"]
)
del(self.keyMap[key_index])
def update_net_desktops(self):
try:
index = self.groups.index(self.currentGroup)
# TODO: we should really only except ValueError here, AttributeError is
# an annoying chicken and egg because we're accessing currentScreen
# (via currentGroup), and when we set up the initial groups, there
# aren't any screens yet. This can probably be changed when #475 is
# fixed.
except (ValueError, AttributeError):
index = 0
self.root.set_property("_NET_NUMBER_OF_DESKTOPS", len(self.groups))
self.root.set_property(
"_NET_DESKTOP_NAMES", "\0".join([i.name for i in self.groups])
)
self.root.set_property("_NET_CURRENT_DESKTOP", index)
def addGroup(self, name, layout=None, layouts=None):
if name not in self.groupMap.keys():
g = _Group(name, layout)
self.groups.append(g)
if not layouts:
layouts = self.config.layouts
g._configure(layouts, self.config.floating_layout, self)
self.groupMap[name] = g
hook.fire("addgroup", self, name)
hook.fire("changegroup")
self.update_net_desktops()
return True
return False
def delGroup(self, name):
# one group per screen is needed
if len(self.groups) == len(self.screens):
raise ValueError("Can't delete all groups.")
if name in self.groupMap.keys():
group = self.groupMap[name]
if group.screen and group.screen.previous_group:
target = group.screen.previous_group
else:
target = group.prevGroup()
# Find a group that's not currently on a screen to bring to the
# front. This will terminate because of our check above.
while target.screen:
target = target.prevGroup()
for i in list(group.windows):
i.togroup(target.name)
if self.currentGroup.name == name:
self.currentScreen.setGroup(target, save_prev=False)
self.groups.remove(group)
del(self.groupMap[name])
hook.fire("delgroup", self, name)
hook.fire("changegroup")
self.update_net_desktops()
def registerWidget(self, w):
"""
Register a bar widget. If a widget with the same name already
exists, this will silently ignore that widget. However, this is
not necessarily a bug. By default a widget's name is just
self.__class__.lower(), so putting multiple widgets of the same
class will alias and one will be inaccessible. Since more than one
groupbox widget is useful when you have more than one screen, this
is a not uncommon occurrence. If you want to use the debug
info for widgets with the same name, set the name yourself.
"""
if w.name:
if w.name in self.widgetMap:
return
self.widgetMap[w.name] = w
@utils.LRUCache(200)
def colorPixel(self, name):
return self.conn.screens[0].default_colormap.alloc_color(name).pixel
@property
def currentLayout(self):
return self.currentGroup.layout
@property
def currentGroup(self):
return self.currentScreen.group
@property
def currentWindow(self):
return self.currentScreen.group.currentWindow
def scan(self):
_, _, children = self.root.query_tree()
for item in children:
try:
attrs = item.get_attributes()
state = item.get_wm_state()
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
continue
if attrs and attrs.map_state == xcffib.xproto.MapState.Unmapped:
continue
if state and state[0] == window.WithdrawnState:
continue
self.manage(item)
def unmanage(self, win):
c = self.windowMap.get(win)
if c:
hook.fire("client_killed", c)
self.reset_gaps(c)
if getattr(c, "group", None):
c.group.remove(c)
del self.windowMap[win]
self.update_client_list()
def reset_gaps(self, c):
if c.strut:
self.update_gaps((0, 0, 0, 0), c.strut)
    def update_gaps(self, strut, old_strut=None):
        # Apply a window's strut reservation (left, right, top, bottom
        # pixel counts) to the current screen by installing Gap objects
        # on the reserved edges, clearing edges released since old_strut.
        from libqtile.bar import Gap
        (left, right, top, bottom) = strut[:4]
        if old_strut:
            (old_left, old_right, old_top, old_bottom) = old_strut[:4]
            # NOTE(review): this elif chain clears at most ONE previously
            # reserved edge per call -- confirm struts never span more
            # than one edge at a time.
            if not left and old_left:
                self.currentScreen.left = None
            elif not right and old_right:
                self.currentScreen.right = None
            elif not top and old_top:
                self.currentScreen.top = None
            elif not bottom and old_bottom:
                self.currentScreen.bottom = None
        # Same single-edge assumption when installing the new gap: the
        # first non-zero edge wins.
        if top:
            self.currentScreen.top = Gap(top)
        elif bottom:
            self.currentScreen.bottom = Gap(bottom)
        elif left:
            self.currentScreen.left = Gap(left)
        elif right:
            self.currentScreen.right = Gap(right)
        self.currentScreen.resize()
def manage(self, w):
try:
attrs = w.get_attributes()
internal = w.get_property("QTILE_INTERNAL")
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return
if attrs and attrs.override_redirect:
return
if w.wid not in self.windowMap:
if internal:
try:
c = window.Internal(w, self)
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return
self.windowMap[w.wid] = c
else:
try:
c = window.Window(w, self)
except (xcffib.xproto.WindowError, xcffib.xproto.AccessError):
return
if w.get_wm_type() == "dock" or c.strut:
c.static(self.currentScreen.index)
else:
hook.fire("client_new", c)
# Window may be defunct because
# it's been declared static in hook.
if c.defunct:
return
self.windowMap[w.wid] = c
# Window may have been bound to a group in the hook.
if not c.group:
self.currentScreen.group.add(c, focus=c.can_steal_focus())
self.update_client_list()
hook.fire("client_managed", c)
return c
else:
return self.windowMap[w.wid]
def update_client_list(self):
"""
Updates the client stack list
this is needed for third party tasklists
and drag and drop of tabs in chrome
"""
windows = [wid for wid, c in self.windowMap.items() if c.group]
self.root.set_property("_NET_CLIENT_LIST", windows)
# TODO: check stack order
self.root.set_property("_NET_CLIENT_LIST_STACKING", windows)
def grabMouse(self):
self.root.ungrab_button(None, None)
for i in self.config.mouse:
if isinstance(i, Click) and i.focus:
# Make a freezing grab on mouse button to gain focus
# Event will propagate to target window
grabmode = xcffib.xproto.GrabMode.Sync
else:
grabmode = xcffib.xproto.GrabMode.Async
eventmask = EventMask.ButtonPress
if isinstance(i, Drag):
eventmask |= EventMask.ButtonRelease
self.root.grab_button(
i.button_code,
i.modmask,
True,
eventmask,
grabmode,
xcffib.xproto.GrabMode.Async,
)
if self.numlockMask:
self.root.grab_button(
i.button_code,
i.modmask | self.numlockMask,
True,
eventmask,
grabmode,
xcffib.xproto.GrabMode.Async,
)
self.root.grab_button(
i.button_code,
i.modmask | self.numlockMask | xcbq.ModMasks["lock"],
True,
eventmask,
grabmode,
xcffib.xproto.GrabMode.Async,
)
def grabKeys(self):
self.root.ungrab_key(None, None)
for key in self.keyMap.values():
self.mapKey(key)
    def get_target_chain(self, ename, e):
        """
        Returns a chain of targets that can handle this event. The event
        will be passed to each target in turn for handling, until one of
        the handlers returns False or the end of the chain is reached.
        """
        chain = []
        handler = "handle_%s" % ename
        # Certain events expose the affected window id as an "event" attribute.
        eventEvents = [
            "EnterNotify",
            "ButtonPress",
            "ButtonRelease",
            "KeyPress",
        ]
        c = None
        # Resolve which client the event refers to; different X event
        # types carry the window id under different attribute names.
        if hasattr(e, "window"):
            c = self.windowMap.get(e.window)
        elif hasattr(e, "drawable"):
            c = self.windowMap.get(e.drawable)
        elif ename in eventEvents:
            c = self.windowMap.get(e.event)
        # The client's own handler (if it defines one) runs before
        # Qtile's class-level handler.
        if c and hasattr(c, handler):
            chain.append(getattr(c, handler))
        if hasattr(self, handler):
            chain.append(getattr(self, handler))
        if not chain:
            logger.info("Unknown event: %r" % ename)
        return chain
def _xpoll(self):
while True:
try:
e = self.conn.conn.poll_for_event()
if not e:
break
ename = e.__class__.__name__
if ename.endswith("Event"):
ename = ename[:-5]
if e.__class__ not in self.ignoreEvents:
logger.debug(ename)
for h in self.get_target_chain(ename, e):
logger.info("Handling: %s" % ename)
r = h(e)
if not r:
break
# Catch some bad X exceptions. Since X is event based, race
# conditions can occur almost anywhere in the code. For
# example, if a window is created and then immediately
# destroyed (before the event handler is evoked), when the
# event handler tries to examine the window properties, it
# will throw a WindowError exception. We can essentially
# ignore it, since the window is already dead and we've got
# another event in the queue notifying us to clean it up.
except (WindowError, AccessError, DrawableError):
pass
except Exception as e:
error_code = self.conn.conn.has_error()
if error_code:
error_string = xcbq.XCB_CONN_ERRORS[error_code]
logger.exception("Shutting down due to X connection error %s (%s)" %
(error_string, error_code))
self.stop()
break
logger.exception("Got an exception in poll loop")
self.conn.flush()
def stop(self):
logger.info('Stopping eventloop')
self._eventloop.stop()
def loop(self):
self.server.start()
try:
self._eventloop.run_forever()
finally:
self.finalize()
def find_screen(self, x, y):
"""
Find a screen based on the x and y offset.
"""
result = []
for i in self.screens:
if i.x <= x <= i.x + i.width and \
i.y <= y <= i.y + i.height:
result.append(i)
if len(result) == 1:
return result[0]
return None
    def find_closest_screen(self, x, y):
        """
        If find_screen returns None, then this basically extends a
        screen vertically and horizontally and see if x,y lies in the
        band.
        Only works if it can find a SINGLE closest screen, else we
        revert to _find_closest_closest.
        Useful when dragging a window out of a screen onto another but
        having leftmost corner above viewport.
        """
        normal = self.find_screen(x, y)
        if normal is not None:
            return normal
        # Screens sharing a vertical band with the point (x_match) and
        # screens sharing a horizontal band with it (y_match).
        x_match = []
        y_match = []
        for i in self.screens:
            if i.x <= x <= i.x + i.width:
                x_match.append(i)
            if i.y <= y <= i.y + i.height:
                y_match.append(i)
        if len(x_match) == 1:
            return x_match[0]
        if len(y_match) == 1:
            return y_match[0]
        # Ambiguous: fall back to a distance-from-center comparison.
        return self._find_closest_closest(x, y, x_match + y_match)
def _find_closest_closest(self, x, y, candidate_screens):
"""
if find_closest_screen can't determine one, we've got multiple
screens, so figure out who is closer. We'll calculate using
the square of the distance from the center of a screen.
Note that this could return None if x, y is right/below all
screens (shouldn't happen but we don't do anything about it
here other than returning None)
"""
closest_distance = None
closest_screen = None
if not candidate_screens:
# try all screens
candidate_screens = self.screens
# if left corner is below and right of screen
# it can't really be a candidate
candidate_screens = [
s for s in candidate_screens
if x < s.x + s.width and y < s.y + s.height
]
for s in candidate_screens:
middle_x = s.x + s.width / 2
middle_y = s.y + s.height / 2
distance = (x - middle_x) ** 2 + (y - middle_y) ** 2
if closest_distance is None or distance < closest_distance:
closest_distance = distance
closest_screen = s
return closest_screen
def handle_SelectionNotify(self, e):
if not getattr(e, "owner", None):
return
name = self.conn.atoms.get_name(e.selection)
self.selection[name]["owner"] = e.owner
self.selection[name]["selection"] = ""
self.convert_selection(e.selection)
hook.fire("selection_notify", name, self.selection[name])
def convert_selection(self, selection, _type="UTF8_STRING"):
TYPE = self.conn.atoms[_type]
self.conn.conn.core.ConvertSelection(self.selection_window.wid,
selection,
TYPE, selection,
xcffib.CurrentTime)
def handle_PropertyNotify(self, e):
name = self.conn.atoms.get_name(e.atom)
# it's the selection property
if name in ("PRIMARY", "CLIPBOARD"):
assert e.window == self.selection_window.wid
prop = self.selection_window.get_property(e.atom, "UTF8_STRING")
# If the selection property is None, it is unset, which means the
# clipboard is empty.
value = prop and prop.value.to_utf8() or six.u("")
self.selection[name]["selection"] = value
hook.fire("selection_change", name, self.selection[name])
def handle_EnterNotify(self, e):
if e.event in self.windowMap:
return True
s = self.find_screen(e.root_x, e.root_y)
if s:
self.toScreen(s.index, warp=False)
def handle_ClientMessage(self, event):
atoms = self.conn.atoms
opcode = event.type
data = event.data
# handle change of desktop
if atoms["_NET_CURRENT_DESKTOP"] == opcode:
index = data.data32[0]
try:
self.currentScreen.setGroup(self.groups[index])
except IndexError:
logger.info("Invalid Desktop Index: %s" % index)
def handle_KeyPress(self, e):
keysym = self.conn.code_to_syms[e.detail][0]
state = e.state
if self.numlockMask:
state = e.state | self.numlockMask
k = self.keyMap.get((keysym, state & self.validMask))
if not k:
logger.info("Ignoring unknown keysym: %s" % keysym)
return
for i in k.commands:
if i.check(self):
status, val = self.server.call(
(i.selectors, i.name, i.args, i.kwargs)
)
if status in (command.ERROR, command.EXCEPTION):
logger.error("KB command error %s: %s" % (i.name, val))
else:
return
def cmd_focus_by_click(self, e):
wnd = e.child or e.root
# Additional option for config.py
# Brings clicked window to front
if self.config.bring_front_click:
self.conn.conn.core.ConfigureWindow(
wnd,
xcffib.xproto.ConfigWindow.StackMode,
[xcffib.xproto.StackMode.Above]
)
if self.windowMap.get(wnd):
self.currentGroup.focus(self.windowMap.get(wnd), False)
self.windowMap.get(wnd).focus(False)
self.conn.conn.core.AllowEvents(xcffib.xproto.Allow.ReplayPointer, e.time)
self.conn.conn.flush()
def handle_ButtonPress(self, e):
button_code = e.detail
state = e.state
if self.numlockMask:
state = e.state | self.numlockMask
k = self.mouseMap.get(button_code)
for m in k:
if not m or m.modmask & self.validMask != state & self.validMask:
logger.info("Ignoring unknown button: %s" % button_code)
continue
if isinstance(m, Click):
for i in m.commands:
if i.check(self):
if m.focus == "before":
self.cmd_focus_by_click(e)
status, val = self.server.call(
(i.selectors, i.name, i.args, i.kwargs))
if m.focus == "after":
self.cmd_focus_by_click(e)
if status in (command.ERROR, command.EXCEPTION):
logger.error(
"Mouse command error %s: %s" % (i.name, val)
)
elif isinstance(m, Drag):
x = e.event_x
y = e.event_y
if m.start:
i = m.start
if m.focus == "before":
self.cmd_focus_by_click(e)
status, val = self.server.call(
(i.selectors, i.name, i.args, i.kwargs))
if status in (command.ERROR, command.EXCEPTION):
logger.error(
"Mouse command error %s: %s" % (i.name, val)
)
continue
else:
val = (0, 0)
if m.focus == "after":
self.cmd_focus_by_click(e)
self._drag = (x, y, val[0], val[1], m.commands)
self.root.grab_pointer(
True,
xcbq.ButtonMotionMask |
xcbq.AllButtonsMask |
xcbq.ButtonReleaseMask,
xcffib.xproto.GrabMode.Async,
xcffib.xproto.GrabMode.Async,
)
def handle_ButtonRelease(self, e):
button_code = e.detail
state = e.state & ~xcbq.AllButtonsMask
if self.numlockMask:
state = state | self.numlockMask
k = self.mouseMap.get(button_code)
for m in k:
if not m:
logger.info(
"Ignoring unknown button release: %s" % button_code
)
continue
if isinstance(m, Drag):
self._drag = None
self.root.ungrab_pointer()
def handle_MotionNotify(self, e):
if self._drag is None:
return
ox, oy, rx, ry, cmd = self._drag
dx = e.event_x - ox
dy = e.event_y - oy
if dx or dy:
for i in cmd:
if i.check(self):
status, val = self.server.call((
i.selectors,
i.name,
i.args + (rx + dx, ry + dy, e.event_x, e.event_y),
i.kwargs
))
if status in (command.ERROR, command.EXCEPTION):
logger.error(
"Mouse command error %s: %s" % (i.name, val)
)
def handle_ConfigureNotify(self, e):
"""
Handle xrandr events.
"""
screen = self.currentScreen
if e.window == self.root.wid and \
e.width != screen.width and \
e.height != screen.height:
screen.resize(0, 0, e.width, e.height)
def handle_ConfigureRequest(self, e):
# It's not managed, or not mapped, so we just obey it.
cw = xcffib.xproto.ConfigWindow
args = {}
if e.value_mask & cw.X:
args["x"] = max(e.x, 0)
if e.value_mask & cw.Y:
args["y"] = max(e.y, 0)
if e.value_mask & cw.Height:
args["height"] = max(e.height, 0)
if e.value_mask & cw.Width:
args["width"] = max(e.width, 0)
if e.value_mask & cw.BorderWidth:
args["borderwidth"] = max(e.border_width, 0)
w = xcbq.Window(self.conn, e.window)
w.configure(**args)
def handle_MappingNotify(self, e):
self.conn.refresh_keymap()
if e.request == xcffib.xproto.Mapping.Keyboard:
self.grabKeys()
def handle_MapRequest(self, e):
w = xcbq.Window(self.conn, e.window)
c = self.manage(w)
if c and (not c.group or not c.group.screen):
return
w.map()
def handle_DestroyNotify(self, e):
self.unmanage(e.window)
def handle_UnmapNotify(self, e):
if e.event != self.root.wid:
c = self.windowMap.get(e.window)
if c and getattr(c, "group", None):
try:
c.window.unmap()
c.state = window.WithdrawnState
except xcffib.xproto.WindowError:
# This means that the window has probably been destroyed,
# but we haven't yet seen the DestroyNotify (it is likely
# next in the queue). So, we just let these errors pass
# since the window is dead.
pass
self.unmanage(e.window)
def handle_ScreenChangeNotify(self, e):
hook.fire("screen_change", self, e)
def toScreen(self, n, warp=True):
"""
Have Qtile move to screen and put focus there
"""
if n >= len(self.screens):
return
old = self.currentScreen
self.currentScreen = self.screens[n]
if old != self.currentScreen:
hook.fire("current_screen_change")
self.currentGroup.focus(self.currentWindow, warp)
def moveToGroup(self, group):
"""
Create a group if it doesn't exist and move a windows there
"""
if self.currentWindow and group:
self.addGroup(group)
self.currentWindow.togroup(group)
def _items(self, name):
if name == "group":
return True, list(self.groupMap.keys())
elif name == "layout":
return True, list(range(len(self.currentGroup.layouts)))
elif name == "widget":
return False, list(self.widgetMap.keys())
elif name == "bar":
return False, [x.position for x in self.currentScreen.gaps]
elif name == "window":
return True, self.listWID()
elif name == "screen":
return True, list(range(len(self.screens)))
def _select(self, name, sel):
if name == "group":
if sel is None:
return self.currentGroup
else:
return self.groupMap.get(sel)
elif name == "layout":
if sel is None:
return self.currentGroup.layout
else:
return utils.lget(self.currentGroup.layouts, sel)
elif name == "widget":
return self.widgetMap.get(sel)
elif name == "bar":
return getattr(self.currentScreen, sel)
elif name == "window":
if sel is None:
return self.currentWindow
else:
return self.clientFromWID(sel)
elif name == "screen":
if sel is None:
return self.currentScreen
else:
return utils.lget(self.screens, sel)
def listWID(self):
return [i.window.wid for i in self.windowMap.values()]
def clientFromWID(self, wid):
for i in self.windowMap.values():
if i.window.wid == wid:
return i
return None
def call_soon(self, func, *args):
""" A wrapper for the event loop's call_soon which also flushes the X
event queue to the server after func is called. """
def f():
func(*args)
self.conn.flush()
self._eventloop.call_soon(f)
def call_soon_threadsafe(self, func, *args):
""" Another event loop proxy, see `call_soon`. """
def f():
func(*args)
self.conn.flush()
self._eventloop.call_soon_threadsafe(f)
def call_later(self, delay, func, *args):
""" Another event loop proxy, see `call_soon`. """
def f():
func(*args)
self.conn.flush()
self._eventloop.call_later(delay, f)
def run_in_executor(self, func, *args):
""" A wrapper for running a function in the event loop's default
executor. """
return self._eventloop.run_in_executor(None, func, *args)
def cmd_debug(self):
"""Set log level to DEBUG"""
logger.setLevel(logging.DEBUG)
logger.debug('Switching to DEBUG threshold')
def cmd_info(self):
"""Set log level to INFO"""
logger.setLevel(logging.INFO)
logger.info('Switching to INFO threshold')
def cmd_warning(self):
"""Set log level to WARNING"""
logger.setLevel(logging.WARNING)
logger.warning('Switching to WARNING threshold')
def cmd_error(self):
"""Set log level to ERROR"""
logger.setLevel(logging.ERROR)
logger.error('Switching to ERROR threshold')
def cmd_critical(self):
"""Set log level to CRITICAL"""
logger.setLevel(logging.CRITICAL)
logger.critical('Switching to CRITICAL threshold')
def cmd_pause(self):
"""Drops into pdb"""
import pdb
pdb.set_trace()
def cmd_groups(self):
"""
Return a dictionary containing information for all groups.
Example:
groups()
"""
return dict((i.name, i.info()) for i in self.groups)
def cmd_get_info(self):
x = {}
for i in self.groups:
x[i.name] = i.info()
return x
def cmd_list_widgets(self):
"""
List of all addressible widget names.
"""
return list(self.widgetMap.keys())
def cmd_to_layout_index(self, index, group=None):
"""
Switch to the layout with the given index in self.layouts.
:index Index of the layout in the list of layouts.
:group Group name. If not specified, the current group is assumed.
"""
if group:
group = self.groupMap.get(group)
else:
group = self.currentGroup
group.toLayoutIndex(index)
def cmd_next_layout(self, group=None):
"""
Switch to the next layout.
:group Group name. If not specified, the current group is assumed.
"""
if group:
group = self.groupMap.get(group)
else:
group = self.currentGroup
group.nextLayout()
def cmd_prev_layout(self, group=None):
"""
Switch to the prev layout.
:group Group name. If not specified, the current group is assumed.
"""
if group:
group = self.groupMap.get(group)
else:
group = self.currentGroup
group.prevLayout()
def cmd_screens(self):
"""
Return a list of dictionaries providing information on all screens.
"""
lst = []
for i in self.screens:
lst.append(dict(
index=i.index,
group=i.group.name if i.group is not None else None,
x=i.x,
y=i.y,
width=i.width,
height=i.height,
gaps=dict(
top=i.top.geometry() if i.top else None,
bottom=i.bottom.geometry() if i.bottom else None,
left=i.left.geometry() if i.left else None,
right=i.right.geometry() if i.right else None,
)
))
return lst
def cmd_simulate_keypress(self, modifiers, key):
"""
Simulates a keypress on the focused window.
:modifiers A list of modifier specification strings. Modifiers can
be one of "shift", "lock", "control" and "mod1" - "mod5".
:key Key specification.
Examples:
simulate_keypress(["control", "mod2"], "k")
"""
# FIXME: This needs to be done with sendevent, once we have that fixed.
keysym = xcbq.keysyms.get(key)
if keysym is None:
raise command.CommandError("Unknown key: %s" % key)
keycode = self.conn.first_sym_to_code[keysym]
class DummyEv(object):
pass
d = DummyEv()
d.detail = keycode
try:
d.state = utils.translateMasks(modifiers)
except KeyError as v:
return v.args[0]
self.handle_KeyPress(d)
def cmd_execute(self, cmd, args):
"""
Executes the specified command, replacing the current process.
"""
self.stop()
os.execv(cmd, args)
    def cmd_restart(self):
        """
        Restart qtile using the execute command.

        The current QtileState is pickled and handed to the replacement
        process through the ``--with-state`` command line argument.
        """
        argv = [sys.executable] + sys.argv
        if '--no-spawn' not in argv:
            argv.append('--no-spawn')
        buf = six.BytesIO()
        try:
            pickle.dump(QtileState(self), buf, protocol=0)
        except:
            # NOTE(review): on pickle failure we still append a possibly
            # empty/partial --with-state below -- confirm the restarted
            # process tolerates that.
            logger.error("Unable to pickle qtile state")
        # Drop any stale state argument left over from a prior restart.
        argv = [s for s in argv if not s.startswith('--with-state')]
        argv.append('--with-state=' + buf.getvalue().decode())
        self.cmd_execute(sys.executable, argv)
def cmd_spawn(self, cmd):
"""
Run cmd in a shell.
cmd may be a string, which is parsed by shlex.split, or
a list (similar to subprocess.Popen).
Example:
spawn("firefox")
spawn(["xterm", "-T", "Temporary terminal"])
"""
if isinstance(cmd, six.string_types):
args = shlex.split(cmd)
else:
args = list(cmd)
r, w = os.pipe()
pid = os.fork()
if pid < 0:
os.close(r)
os.close(w)
return pid
if pid == 0:
os.close(r)
# close qtile's stdin, stdout, stderr so the called process doesn't
# pollute our xsession-errors.
os.close(0)
os.close(1)
os.close(2)
pid2 = os.fork()
if pid2 == 0:
os.close(w)
# Open /dev/null as stdin, stdout, stderr
try:
fd = os.open(os.devnull, os.O_RDWR)
except OSError:
# This shouldn't happen, catch it just in case
pass
else:
# For Python >=3.4, need to set file descriptor to inheritable
try:
os.set_inheritable(fd, True)
except AttributeError:
pass
# Again, this shouldn't happen, but we should just check
if fd > 0:
os.dup2(fd, 0)
os.dup2(fd, 1)
os.dup2(fd, 2)
try:
os.execvp(args[0], args)
except OSError as e:
logger.error("failed spawn: \"{0}\"\n{1}".format(cmd, e))
os._exit(1)
else:
# Here it doesn't matter if fork failed or not, we just write
# its return code and exit.
os.write(w, str(pid2).encode())
os.close(w)
# sys.exit raises SystemExit, which will then be caught by our
# top level catchall and we'll end up with two qtiles; os._exit
# actually calls exit.
os._exit(0)
else:
os.close(w)
os.waitpid(pid, 0)
# 1024 bytes should be enough for any pid. :)
pid = os.read(r, 1024)
os.close(r)
return int(pid)
def cmd_status(self):
"""
Return "OK" if Qtile is running.
"""
return "OK"
def cmd_sync(self):
"""
Sync the X display. Should only be used for development.
"""
self.conn.flush()
def cmd_to_screen(self, n):
"""
Warp focus to screen n, where n is a 0-based screen number.
Example:
to_screen(0)
"""
return self.toScreen(n)
def cmd_next_screen(self):
"""
Move to next screen
"""
return self.toScreen(
(self.screens.index(self.currentScreen) + 1) % len(self.screens)
)
def cmd_prev_screen(self):
"""
Move to the previous screen
"""
return self.toScreen(
(self.screens.index(self.currentScreen) - 1) % len(self.screens)
)
def cmd_windows(self):
"""
Return info for each client window.
"""
return [
i.info() for i in self.windowMap.values()
if not isinstance(i, window.Internal)
]
def cmd_internal_windows(self):
"""
Return info for each internal window (bars, for example).
"""
return [
i.info() for i in self.windowMap.values()
if isinstance(i, window.Internal)
]
def cmd_qtile_info(self):
"""
Returns a dictionary of info on the Qtile instance.
"""
return dict(socketname=self.fname)
def cmd_shutdown(self):
"""
Quit Qtile.
"""
self.stop()
def cmd_switch_groups(self, groupa, groupb):
"""
Switch position of groupa to groupb
"""
if groupa not in self.groupMap or groupb not in self.groupMap:
return
indexa = self.groups.index(self.groupMap[groupa])
indexb = self.groups.index(self.groupMap[groupb])
self.groups[indexa], self.groups[indexb] = \
self.groups[indexb], self.groups[indexa]
hook.fire("setgroup")
# update window _NET_WM_DESKTOP
for group in (self.groups[indexa], self.groups[indexb]):
for w in group.windows:
w.group = group
    def find_window(self, wid):
        # Focus the managed window with X id ``wid``, first pulling its
        # group onto the current screen if it isn't visible anywhere.
        # (The local name shadows the module-level ``window`` import,
        # which this method does not use.)
        window = self.windowMap.get(wid)
        if window:
            if not window.group.screen:
                self.currentScreen.setGroup(window.group)
            window.group.focus(window, False)
def cmd_findwindow(self, prompt="window", widget="prompt"):
mb = self.widgetMap.get(widget)
if not mb:
logger.error("No widget named '%s' present." % widget)
return
mb.startInput(
prompt,
self.find_window,
"window",
strict_completer=True
)
def cmd_next_urgent(self):
try:
nxt = [w for w in self.windowMap.values() if w.urgent][0]
nxt.group.cmd_toscreen()
nxt.group.focus(nxt)
except IndexError:
pass # no window had urgent set
    def cmd_togroup(self, prompt="group", widget="prompt"):
        """
        Move current window to the selected group in a prompt widget
        prompt: Text with which to prompt user.
        widget: Name of the prompt widget (default: "prompt").
        """
        if not self.currentWindow:
            logger.warning("No window to move")
            return
        mb = self.widgetMap.get(widget)
        if not mb:
            logger.error("No widget named '%s' present." % widget)
            return
        mb.startInput(prompt, self.moveToGroup, "group", strict_completer=True)
def cmd_switchgroup(self, prompt="group", widget="prompt"):
def f(group):
if group:
try:
self.groupMap[group].cmd_toscreen()
except KeyError:
logger.info("No group named '%s' present." % group)
pass
mb = self.widgetMap.get(widget)
if not mb:
logger.warning("No widget named '%s' present." % widget)
return
mb.startInput(prompt, f, "group", strict_completer=True)
def cmd_spawncmd(self, prompt="spawn", widget="prompt",
command="%s", complete="cmd"):
"""
Spawn a command using a prompt widget, with tab-completion.
prompt: Text with which to prompt user (default: "spawn: ").
widget: Name of the prompt widget (default: "prompt").
command: command template (default: "%s").
complete: Tab completion function (default: "cmd")
"""
def f(args):
if args:
self.cmd_spawn(command % args)
try:
mb = self.widgetMap[widget]
mb.startInput(prompt, f, complete)
except KeyError:
logger.error("No widget named '%s' present." % widget)
def cmd_qtilecmd(self, prompt="command",
widget="prompt", messenger="xmessage"):
"""
Execute a Qtile command using the client syntax.
Tab completeion aids navigation of the command tree.
prompt: Text to display at the prompt (default: "command: ").
widget: Name of the prompt widget (default: "prompt").
messenger: command to display output (default: "xmessage").
Set this to None to disable.
"""
def f(cmd):
if cmd:
# c here is used in eval() below
c = command.CommandRoot(self) # noqa
try:
cmd_arg = str(cmd).split(' ')
except AttributeError:
return
cmd_len = len(cmd_arg)
if cmd_len == 0:
logger.info('No command entered.')
return
try:
result = eval('c.%s' % (cmd))
except (
command.CommandError,
command.CommandException,
AttributeError) as err:
logger.error(err)
result = None
if result is not None:
from pprint import pformat
message = pformat(result)
if messenger:
self.cmd_spawn('%s "%s"' % (messenger, message))
logger.info(result)
mb = self.widgetMap[widget]
if not mb:
logger.error("No widget named %s present." % widget)
return
mb.startInput(prompt, f, "qsh")
    def cmd_addgroup(self, group):
        """Add a new group named ``group``; returns True if it was created."""
        return self.addGroup(group)
    def cmd_delgroup(self, group):
        """Delete the group named ``group``, moving its windows to another group."""
        return self.delGroup(group)
def cmd_add_rule(self, match_args, rule_args, min_priorty=False):
"""
Add a dgroup rule, returns rule_id needed to remove it
param: match_args (config.Match arguments)
param: rule_args (config.Rule arguments)
param: min_priorty if the rule is added with minimun prioriry(last)
"""
if not self.dgroups:
logger.warning('No dgroups created')
return
match = Match(**match_args)
rule = Rule(match, **rule_args)
return self.dgroups.add_rule(rule, min_priorty)
def cmd_remove_rule(self, rule_id):
self.dgroups.remove_rule(rule_id)
def cmd_run_external(self, full_path):
def format_error(path, e):
s = """Can't call "main" from "{path}"\n\t{err_name}: {err}"""
return s.format(path=path, err_name=e.__class__.__name__, err=e)
module_name = os.path.splitext(os.path.basename(full_path))[0]
dir_path = os.path.dirname(full_path)
err_str = ""
local_stdout = six.BytesIO()
old_stdout = sys.stdout
sys.stdout = local_stdout
sys.exc_clear()
try:
module = _import_module(module_name, dir_path)
module.main(self)
except ImportError as e:
err_str += format_error(full_path, e)
except:
(exc_type, exc_value, exc_traceback) = sys.exc_info()
err_str += traceback.format_exc()
err_str += format_error(full_path, exc_type(exc_value))
finally:
sys.exc_clear()
sys.stdout = old_stdout
local_stdout.close()
return local_stdout.getvalue() + err_str
def cmd_hide_show_bar(self, position="all"):
"""
param: position one of: "top", "bottom", "left", "right" or "all"
"""
if position in ["top", "bottom", "left", "right"]:
bar = getattr(self.currentScreen, position)
if bar:
bar.show(not bar.is_show())
self.currentGroup.layoutAll()
else:
logger.warning(
"Not found bar in position '%s' for hide/show." % position)
elif position == "all":
screen = self.currentScreen
is_show = None
for bar in [screen.left, screen.right, screen.top, screen.bottom]:
if bar:
if is_show is None:
is_show = not bar.is_show()
bar.show(is_show)
if is_show is not None:
self.currentGroup.layoutAll()
else:
logger.warning("Not found bar for hide/show.")
else:
logger.error("Invalid position value:%s" % position)
def cmd_get_state(self):
buf = six.BytesIO()
pickle.dump(QtileState(self), buf, protocol=0)
state = buf.getvalue().decode()
logger.info('State = ')
logger.info(''.join(state.split('\n')))
return state
def cmd_tracemalloc_toggle(self):
if not tracemalloc.is_tracing():
tracemalloc.start()
else:
tracemalloc.stop()
def cmd_tracemalloc_dump(self):
if not tracemalloc:
logger.warning('No tracemalloc module')
raise command.CommandError("No tracemalloc module")
if not tracemalloc.is_tracing():
return [False, "Trace not started"]
cache_directory = get_cache_dir()
malloc_dump = os.path.join(cache_directory, "qtile_tracemalloc.dump")
tracemalloc.take_snapshot().dump(malloc_dump)
return [True, malloc_dump]<|fim▁end|> | file_name = os.path.join(dir_path, module_name) + '.py'
f = importlib.machinery.SourceFileLoader(module_name, file_name)
module = f.load_module()
return module |
<|file_name|>program.cpp<|end_file_name|><|fim▁begin|>/*
-------------------------------------------------------------
Copyright (c) MMXIII Atle Solbakken
[email protected]
-------------------------------------------------------------
This file is part of P* (P-star).
P* is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
P* is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with P*. If not, see <http://www.gnu.org/licenses/>.
-------------------------------------------------------------
*/
#include "program.h"
#include "parser.h"
#include "pragma.h"
#include "types.h"
#include "value.h"<|fim▁hole|>#include "namespace_session.h"
#include "io.h"
#include <utility>
#include <memory>
static mutex init_lock;
static bool initialized = false;
#ifndef WIN32
static list<wpl_module_loader> modules;
#endif
wpl_program::wpl_program(wpl_io &io, int argc, char **argv) :
parser(io, 0)
{
#ifndef WIN32
{
lock_guard<mutex> lock(init_lock);
if (!initialized) {
char *dummy;
modules.emplace_back(0, &dummy, "mysql");
initialized = true;
}
}
#endif /* WIN32 */
wpl_types_add_all_to_namespace(this);
wpl_pragma_add_all_to_namespace(this);
unique_ptr<wpl_value_array> val_argv (
new wpl_value_array (
wpl_type_global_string,
argc
)
);
for (int i = 0; i < argc; i++) {
val_argv->set (i, new wpl_value_string(argv[i]));
}
wpl_variable_holder new_variable ("argv", val_argv.release(), WPL_VARIABLE_ACCESS_PRIVATE);
new_variable.setStatic();
register_identifier(&new_variable);
#ifndef WIN32
for (wpl_module_loader &module : modules) {
insert_parent_namespace(module.get_namespace());
}
#endif
}
void wpl_program::parse_file (const char *filename) {
parser.parse_file(this, filename);
}
int wpl_program::run(wpl_io &io) {
int ret;
wpl_value *value;
wpl_value_return retval;
wpl_namespace_session nss;
copy_variables_to_namespace_session (&nss);
wpl_scene *main;
if (!(main = find_scene("main"))) {
throw runtime_error("Could not find 'SCENE main'");
}
wpl_value_int return_value;
wpl_block_state program_state(NULL, &io, this);
unique_ptr<wpl_state> main_state(main->new_state(&program_state, &io));
ret = main->run(main_state.get(), &return_value);
return return_value.get();
}<|fim▁end|> | #include "value_array.h"
#include "value_string.h"
#include "value_int.h" |
<|file_name|>load_results_from_file.py<|end_file_name|><|fim▁begin|>import numpy
from srxraylib.plot.gol import plot_image, plot
import sys
from comsyl.scripts.CompactAFReader import CompactAFReader
def plot_stack(mystack,what="intensity",title0="X",title1="Y",title2="Z"):
from silx.gui.plot.StackView import StackViewMainWindow
from silx.gui import qt
app = qt.QApplication(sys.argv[1:])
sv = StackViewMainWindow()
sv.setColormap("jet", autoscale=True)
if what == "intensity":
sv.setStack(numpy.absolute(mystack))
elif what == "real":
sv.setStack(numpy.real(mystack))
elif what == "imaginary":
sv.setStack(numpy.imag(mystack))
elif what == "phase":
sv.setStack(numpy.angle(mystack))
elif what == "phase_deg":
sv.setStack(numpy.angle(mystack,deg=True))
else:<|fim▁hole|>
app.exec_()
def load_stack(filename):
# filename = "/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5"
reader = CompactAFReader(filename)
print("File %s:" % filename)
print("contains")
print("%i modes" % reader.number_modes())
print("on the grid")
print("x: from %e to %e" % (reader.x_coordinates().min(), reader.x_coordinates().max()))
print("y: from %e to %e" % (reader.y_coordinates().min(), reader.y_coordinates().max()))
print("calculated at %f eV" % reader.photon_energy())
print("with total intensity in (maybe improper) normalization: %e" % reader.total_intensity().real.sum())
print("Occupation and max abs value of the mode")
x = reader.x_coordinates()
y = reader.y_coordinates()
eigenvalues = numpy.zeros(reader.number_modes())
mystack = numpy.zeros((reader.number_modes(),y.size,x.size),dtype=complex)
for i_mode in range(reader.number_modes()):
eigenvalues[i_mode] = reader.occupation(i_mode)
mode = reader.mode(i_mode)
mystack[i_mode,:,:] = mode.T
return x,y,mystack, eigenvalues
if __name__ == "__main__":
h,v,mystack, occupation = load_stack("/users/srio/OASYS_VE/comsyl_srio/calculations/new_u18_2m_1h_s2.5")
plot_stack(mystack,what="intensity", title0="Mode index",
title1="V from %3.2f to %3.2f um"%(1e3*v.min(),1e3*v.max()),
title2="H from %3.2f to %3.2f um"%(1e3*h.min(),1e3*h.max()))
plot(numpy.arange(occupation.size),occupation)<|fim▁end|> | raise Exception("Undefined label "+what)
sv.setLabels([title0,title1,title2])
sv.show() |
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst
<|fim▁hole|>import pytest
import numpy as np
from astropy.cosmology.utils import inf_like, vectorize_if_needed, vectorize_redshift_method
from astropy.utils.exceptions import AstropyDeprecationWarning
def test_vectorize_redshift_method():
"""Test :func:`astropy.cosmology.utils.vectorize_redshift_method`."""
class Class:
@vectorize_redshift_method
def method(self, z):
return z
c = Class()
assert hasattr(c.method, "__vectorized__")
assert isinstance(c.method.__vectorized__, np.vectorize)
# calling with Number
assert c.method(1) == 1
assert isinstance(c.method(1), int)
# calling with a numpy scalar
assert c.method(np.float64(1)) == np.float64(1)
assert isinstance(c.method(np.float64(1)), np.float64)
# numpy array
assert all(c.method(np.array([1, 2])) == np.array([1, 2]))
assert isinstance(c.method(np.array([1, 2])), np.ndarray)
# non-scalar
assert all(c.method([1, 2]) == np.array([1, 2]))
assert isinstance(c.method([1, 2]), np.ndarray)
def test_vectorize_if_needed():
"""
Test :func:`astropy.cosmology.utils.vectorize_if_needed`.
There's no need to test 'veckw' because that is directly pasased to
`numpy.vectorize` which thoroughly tests the various inputs.
"""
func = lambda x: x ** 2
with pytest.warns(AstropyDeprecationWarning):
# not vectorized
assert vectorize_if_needed(func, 2) == 4
# vectorized
assert all(vectorize_if_needed(func, [2, 3]) == [4, 9])
@pytest.mark.parametrize("arr, expected",
[(0.0, inf), # float scalar
(1, inf), # integer scalar should give float output
([0.0, 1.0, 2.0, 3.0], (inf, inf, inf, inf)),
([0, 1, 2, 3], (inf, inf, inf, inf)), # integer list
])
def test_inf_like(arr, expected):
"""
Test :func:`astropy.cosmology.utils.inf_like`.
All inputs should give a float output.
These tests are also in the docstring, but it's better to have them also
in one consolidated location.
"""
with pytest.warns(AstropyDeprecationWarning):
assert np.all(inf_like(arr) == expected)<|fim▁end|> | from math import inf
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages
from orotangi import __version__ as version
install_requires = [
'Django==1.11.18',
'djangorestframework==3.6.2',
'django-cors-headers==2.0.2',
'django-filter==1.0.2',
'python-dateutil==2.6.0'
]
setup(
name='orotangi',
version=version,
description='Your Thoughts, Everywhere',
author='FoxMaSk',
maintainer='FoxMaSk',
author_email='[email protected]',
maintainer_email='[email protected]',
url='https://github.com/foxmask/orotangi',
download_url="https://github.com/foxmask/orotangi/"
"archive/orotangi-" + version + ".zip",
packages=find_packages(exclude=['orotangi/local_settings']),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',<|fim▁hole|> 'Framework :: Django :: 1.11',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Topic :: Internet',
'Topic :: Communications',
'Topic :: Database',
],
install_requires=install_requires,
include_package_data=True,
)<|fim▁end|> | |
<|file_name|>scriptStage.ts<|end_file_name|><|fim▁begin|>import { module } from 'angular';
import { Registry } from 'core/registry';
import { AuthenticationService } from 'core/authentication';
import { ExecutionDetailsTasks } from '../common';
import { ScriptStageConfig, validate } from './ScriptStageConfig';
import { ScriptExecutionDetails } from './ScriptExecutionDetails';
export const SCRIPT_STAGE = 'spinnaker.core.pipeline.stage.scriptStage';
module(SCRIPT_STAGE, []).config(() => {
Registry.pipeline.registerStage({
label: 'Script',
description: 'Runs a script',
supportsCustomTimeout: true,
key: 'script',
restartable: true,
defaults: {
waitForCompletion: true,
failPipeline: true,
get user() {
return AuthenticationService.getAuthenticatedUser().name;
},
},
component: ScriptStageConfig,
executionDetailsSections: [ScriptExecutionDetails, ExecutionDetailsTasks],
strategy: true,
validateFn: validate,<|fim▁hole|>});<|fim▁end|> | }); |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.