| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
|---|---|
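Each row below is a fill-in-the-middle (FIM) training record: the prompt carries a file name and source code with a `<|fim▁hole|>` marker, and the completion column (after the `|`) holds the text that fills the hole. A minimal sketch of splicing a record back into full source, assuming only the special tokens visible in the rows below (the `splice` helper and the toy record are illustrative, not part of any dataset tooling):

```python
# Rebuild original source from a (prompt, completion) FIM pair.
# Token names are taken from the records below; everything else is illustrative.
BEGIN, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def splice(prompt: str, completion: str) -> str:
    body = prompt.split(BEGIN, 1)[1]      # keep text after the begin marker
    body = body.split(END, 1)[0]          # drop the trailing end marker
    prefix, suffix = body.split(HOLE, 1)  # the hole splits prefix from suffix
    return prefix + completion + suffix

# Toy record: the completion lands exactly where the hole marker was.
prompt = BEGIN + "a = 1\n" + HOLE + "\nc = 3" + END
print(splice(prompt, "b = 2"))  # -> a = 1\nb = 2\nc = 3
```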
<|file_name|>Response.java<|end_file_name|><|fim▁begin|>/*
* www.yiji.com Inc.
* Copyright (c) 2016 All Rights Reserved
*/
package com.falcon.suitagent.web;
/*
* Revision history:
* [email protected] 2016-07-26 13:54 created
*/
import com.falcon.suitagent.config.AgentConfiguration;
import com.falcon.suitagent.plugins.metrics.MetricsCommon;
import lombok.extern.slf4j.Slf4j;
import java.io.IOException;
import java.io.OutputStream;
import java.util.List;
/**
* @author [email protected]
*/
@Slf4j
public class Response {
private static final int BUFFER_SIZE = 1024;
Request request;
OutputStream output;
public Response(OutputStream output) {
this.output = output;
}
public void setRequest(Request request) {
this.request = request;
}
public void send_404() throws IOException {
String errorMessage = "HTTP/1.1 404 File Not Found\r\n" +
"Content-Type: text/html\r\n" +
"Content-Length: 23\r\n" +
"\r\n" +
"<h1>File Not Found</h1>";
output.write(errorMessage.getBytes());
}
public void send(String html) throws IOException {
byte[] body = html.getBytes();
String response = "HTTP/1.1 200 OK\r\n" +
"Content-Type: text/html\r\n" +
// Content-Length counts bytes, not characters
"Content-Length: " + body.length + "\r\n" +
"\r\n";
output.write(response.getBytes());
output.write(body);
}
public void doRequest() throws IOException {
List<String> urlPath = request.getUrlPath();
if(urlPath.size() >= 1 && "mock".equals(urlPath.get(0))){
if(urlPath.size() < 2){
send("error! must have option");
return;
}
String msg = "";
String option = urlPath.get(1);
if("list".equals(option)){
msg = MetricsCommon.getMockServicesList();
}else if(urlPath.size() != 4){
send("<h3>error! url path must be match : /mock/{option}/{serviceType}/{serviceName}</h3>");
}else{
String type = urlPath.get(2);
String server = urlPath.get(3);
if("add".equals(option)){
MetricsCommon.addMockService(type,server);
msg = String.format("<h2>add mock server %s:%s success</h2>",type,server);
}else if("remove".equals(option)){
MetricsCommon.removeMockService(type,server);
msg = String.format("<h2>remove mock server %s:%s success</h2>",type,server);
}
}
send(msg);<|fim▁hole|> send("Version " + AgentConfiguration.VERSION);
}else{
send_404();
}
}
}<|fim▁end|> | }else if(urlPath.size() >= 1 && "version".equals(urlPath.get(0))){ |
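The `send` fix above hinges on `Content-Length` being a byte count rather than a character count; the two diverge as soon as the body contains non-ASCII text. A quick Python illustration of the difference:

```python
# Character count vs. byte count: equal for ASCII, not otherwise.
body = "<h1>héllo</h1>"
print(len(body))                  # 14 characters
print(len(body.encode("utf-8")))  # 15 bytes -- what Content-Length must report
```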
<|file_name|>navtree.js<|end_file_name|><|fim▁begin|>var NAVTREE =
[
[ "game_of_life", "index.html", [
[ "game_of_life", "md__r_e_a_d_m_e.html", null ],
[ "Classes", null, [
[ "Class List", "annotated.html", "annotated" ],
[ "Class Index", "classes.html", null ],
[ "Class Members", "functions.html", [
[ "All", "functions.html", null ],
[ "Functions", "functions_func.html", null ]
] ]
] ],
[ "Files", null, [
[ "File List", "files.html", "files" ],
[ "File Members", "globals.html", [
[ "All", "globals.html", null ],
[ "Functions", "globals_func.html", null ],
[ "Variables", "globals_vars.html", null ]
] ]
] ]
] ]
];
var NAVTREEINDEX =
[
"annotated.html"
];
var SYNCONMSG = 'click to disable panel synchronisation';
var SYNCOFFMSG = 'click to enable panel synchronisation';
var navTreeSubIndices = new Array();
function getData(varName)
{
var i = varName.lastIndexOf('/');
var n = i>=0 ? varName.substring(i+1) : varName;
return eval(n.replace(/\-/g,'_'));
}
function stripPath(uri)
{
return uri.substring(uri.lastIndexOf('/')+1);
}
function stripPath2(uri)
{
var i = uri.lastIndexOf('/');
var s = uri.substring(i+1);
var m = uri.substring(0,i+1).match(/\/d\w\/d\w\w\/$/);
return m ? uri.substring(i-6) : s;
}
function localStorageSupported()
{
try {
return 'localStorage' in window && window['localStorage'] !== null && window.localStorage.getItem;
}
catch(e) {
return false;
}
}
function storeLink(link)
{
if (!$("#nav-sync").hasClass('sync') && localStorageSupported()) {
window.localStorage.setItem('navpath',link);
}
}
function deleteLink()
{
if (localStorageSupported()) {
window.localStorage.setItem('navpath','');
}
}
function cachedLink()
{
if (localStorageSupported()) {
return window.localStorage.getItem('navpath');
} else {
return '';
}
}
function getScript(scriptName,func,show)
{
var head = document.getElementsByTagName("head")[0];
var script = document.createElement('script');
script.id = scriptName;
script.type = 'text/javascript';
script.onload = func;
script.src = scriptName+'.js';
if ($.browser.msie && $.browser.version<=8) {
// script.onload does not work with older versions of IE
script.onreadystatechange = function() {
if (script.readyState=='complete' || script.readyState=='loaded') {
func(); if (show) showRoot();
}
}
}
head.appendChild(script);
}
function createIndent(o,domNode,node,level)
{
var level=-1;
var n = node;
while (n.parentNode) { level++; n=n.parentNode; }
var imgNode = document.createElement("img");
imgNode.style.paddingLeft=(16*level).toString()+'px';
imgNode.width = 16;
imgNode.height = 22;
imgNode.border = 0;
if (node.childrenData) {
node.plus_img = imgNode;
node.expandToggle = document.createElement("a");
node.expandToggle.href = "javascript:void(0)";
node.expandToggle.onclick = function() {
if (node.expanded) {
$(node.getChildrenUL()).slideUp("fast");
node.plus_img.src = node.relpath+"ftv2pnode.png";
node.expanded = false;
} else {
expandNode(o, node, false, false);
}
}
node.expandToggle.appendChild(imgNode);
domNode.appendChild(node.expandToggle);
imgNode.src = node.relpath+"ftv2pnode.png";
} else {
imgNode.src = node.relpath+"ftv2node.png";
domNode.appendChild(imgNode);
}
}
var animationInProgress = false;
function gotoAnchor(anchor,aname,updateLocation)
{
var pos, docContent = $('#doc-content');
if (anchor.parent().attr('class')=='memItemLeft' ||
anchor.parent().attr('class')=='fieldtype' ||
anchor.parent().is(':header'))
{
pos = anchor.parent().position().top;
} else if (anchor.position()) {
pos = anchor.position().top;
}
if (pos) {
var dist = Math.abs(Math.min(
pos-docContent.offset().top,
docContent[0].scrollHeight-
docContent.height()-docContent.scrollTop()));
animationInProgress=true;
docContent.animate({
scrollTop: pos + docContent.scrollTop() - docContent.offset().top
},Math.max(50,Math.min(500,dist)),function(){
if (updateLocation) window.location.href=aname;
animationInProgress=false;
});
}
}
function newNode(o, po, text, link, childrenData, lastNode)
{
var node = new Object();
node.children = Array();
node.childrenData = childrenData;
node.depth = po.depth + 1;
node.relpath = po.relpath;
node.isLast = lastNode;
node.li = document.createElement("li");
po.getChildrenUL().appendChild(node.li);
node.parentNode = po;
node.itemDiv = document.createElement("div");
node.itemDiv.className = "item";
node.labelSpan = document.createElement("span");
node.labelSpan.className = "label";
createIndent(o,node.itemDiv,node,0);
node.itemDiv.appendChild(node.labelSpan);
node.li.appendChild(node.itemDiv);
var a = document.createElement("a");
node.labelSpan.appendChild(a);
node.label = document.createTextNode(text);
node.expanded = false;
a.appendChild(node.label);
if (link) {
var url;
if (link.substring(0,1)=='^') {
url = link.substring(1);
link = url;
} else {
url = node.relpath+link;
}
a.className = stripPath(link.replace('#',':'));
if (link.indexOf('#')!=-1) {
var aname = '#'+link.split('#')[1];
var srcPage = stripPath($(location).attr('pathname'));
var targetPage = stripPath(link.split('#')[0]);
a.href = srcPage!=targetPage ? url : "javascript:void(0)";
a.onclick = function(){
storeLink(link);
if (!$(a).parent().parent().hasClass('selected'))
{
$('.item').removeClass('selected');
$('.item').removeAttr('id');
$(a).parent().parent().addClass('selected');
$(a).parent().parent().attr('id','selected');
}
var anchor = $(aname);
gotoAnchor(anchor,aname,true);
};
} else {
a.href = url;
a.onclick = function() { storeLink(link); }
}
} else {
if (childrenData != null)
{
a.className = "nolink";
a.href = "javascript:void(0)";
a.onclick = node.expandToggle.onclick;
}
}
node.childrenUL = null;
node.getChildrenUL = function() {
if (!node.childrenUL) {
node.childrenUL = document.createElement("ul");
node.childrenUL.className = "children_ul";
node.childrenUL.style.display = "none";
node.li.appendChild(node.childrenUL);
}
return node.childrenUL;
};
return node;
}
function showRoot()
{
var headerHeight = $("#top").height();
var footerHeight = $("#nav-path").height();
var windowHeight = $(window).height() - headerHeight - footerHeight;
(function (){ // retry until we can scroll to the selected item
try {
var navtree=$('#nav-tree');
navtree.scrollTo('#selected',0,{offset:-windowHeight/2});
} catch (err) {
setTimeout(arguments.callee, 0);
}
})();
}
function expandNode(o, node, imm, showRoot)
{
if (node.childrenData && !node.expanded) {
if (typeof(node.childrenData)==='string') {
var varName = node.childrenData;
getScript(node.relpath+varName,function(){
node.childrenData = getData(varName);
expandNode(o, node, imm, showRoot);
}, showRoot);
} else {
if (!node.childrenVisited) {
getNode(o, node);
} if (imm || ($.browser.msie && $.browser.version>8)) {
// somehow slideDown jumps to the start of tree for IE9 :-(
$(node.getChildrenUL()).show();
} else {
$(node.getChildrenUL()).slideDown("fast");
}
if (node.isLast) {
node.plus_img.src = node.relpath+"ftv2mlastnode.png";
} else {
node.plus_img.src = node.relpath+"ftv2mnode.png";
}
node.expanded = true;
}
}
}
function glowEffect(n,duration)
{
n.addClass('glow').delay(duration).queue(function(next){
$(this).removeClass('glow');next();
});
}
function highlightAnchor()
{
var aname = $(location).attr('hash');
var anchor = $(aname);
if (anchor.parent().attr('class')=='memItemLeft'){
var rows = $('.memberdecls tr[class$="'+
window.location.hash.substring(1)+'"]');
glowEffect(rows.children(),300); // member without details
} else if (anchor.parents().slice(2).prop('tagName')=='TR') {
glowEffect(anchor.parents('div.memitem'),1000); // enum value
} else if (anchor.parent().attr('class')=='fieldtype'){
glowEffect(anchor.parent().parent(),1000); // struct field
} else if (anchor.parent().is(":header")) {
glowEffect(anchor.parent(),1000); // section header
} else {
glowEffect(anchor.next(),1000); // normal member
}
gotoAnchor(anchor,aname,false);
}
function selectAndHighlight(hash,n)
{
var a;
if (hash) {
var link=stripPath($(location).attr('pathname'))+':'+hash.substring(1);
a=$('.item a[class$="'+link+'"]');
}
if (a && a.length) {
a.parent().parent().addClass('selected');
a.parent().parent().attr('id','selected');
highlightAnchor();
} else if (n) {
$(n.itemDiv).addClass('selected');
$(n.itemDiv).attr('id','selected');
}
if ($('#nav-tree-contents .item:first').hasClass('selected')) {
$('#nav-sync').css('top','30px');
} else {
$('#nav-sync').css('top','5px');
}
showRoot();
}
function showNode(o, node, index, hash)
{
if (node && node.childrenData) {
if (typeof(node.childrenData)==='string') {
var varName = node.childrenData;
getScript(node.relpath+varName,function(){
node.childrenData = getData(varName);
showNode(o,node,index,hash);
},true);
} else {
if (!node.childrenVisited) {
getNode(o, node);
}
$(node.getChildrenUL()).show();
if (node.isLast) {
node.plus_img.src = node.relpath+"ftv2mlastnode.png";
} else {
node.plus_img.src = node.relpath+"ftv2mnode.png";
}
node.expanded = true;
var n = node.children[o.breadcrumbs[index]];
if (index+1<o.breadcrumbs.length) {
showNode(o,n,index+1,hash);
} else {
if (typeof(n.childrenData)==='string') {
var varName = n.childrenData;
getScript(n.relpath+varName,function(){
n.childrenData = getData(varName);
node.expanded=false;
showNode(o,node,index,hash); // retry with child node expanded
},true);
} else {
var rootBase = stripPath(o.toroot.replace(/\..+$/, ''));
if (rootBase=="index" || rootBase=="pages" || rootBase=="search") {<|fim▁hole|> selectAndHighlight(hash,n);
}
}
}
} else {
selectAndHighlight(hash);
}
}
function getNode(o, po)
{
po.childrenVisited = true;
var l = po.childrenData.length-1;
for (var i in po.childrenData) {
var nodeData = po.childrenData[i];
po.children[i] = newNode(o, po, nodeData[0], nodeData[1], nodeData[2],
i==l);
}
}
function gotoNode(o,subIndex,root,hash,relpath)
{
var nti = navTreeSubIndices[subIndex][root+hash];
o.breadcrumbs = $.extend(true, [], nti ? nti : navTreeSubIndices[subIndex][root]);
if (!o.breadcrumbs && root!=NAVTREE[0][1]) { // fallback: show index
navTo(o,NAVTREE[0][1],"",relpath);
$('.item').removeClass('selected');
$('.item').removeAttr('id');
}
if (o.breadcrumbs) {
o.breadcrumbs.unshift(0); // add 0 for root node
showNode(o, o.node, 0, hash);
}
}
function navTo(o,root,hash,relpath)
{
var link = cachedLink();
if (link) {
var parts = link.split('#');
root = parts[0];
if (parts.length>1) hash = '#'+parts[1];
else hash='';
}
if (hash.match(/^#l\d+$/)) {
var anchor=$('a[name='+hash.substring(1)+']');
glowEffect(anchor.parent(),1000); // line number
hash=''; // strip line number anchors
//root=root.replace(/_source\./,'.'); // source link to doc link
}
var url=root+hash;
var i=-1;
while (NAVTREEINDEX[i+1]<=url) i++;
if (i==-1) { i=0; root=NAVTREE[0][1]; } // fallback: show index
if (navTreeSubIndices[i]) {
gotoNode(o,i,root,hash,relpath)
} else {
getScript(relpath+'navtreeindex'+i,function(){
navTreeSubIndices[i] = eval('NAVTREEINDEX'+i);
if (navTreeSubIndices[i]) {
gotoNode(o,i,root,hash,relpath);
}
},true);
}
}
function showSyncOff(n,relpath)
{
n.html('<img src="'+relpath+'sync_off.png" title="'+SYNCOFFMSG+'"/>');
}
function showSyncOn(n,relpath)
{
n.html('<img src="'+relpath+'sync_on.png" title="'+SYNCONMSG+'"/>');
}
function toggleSyncButton(relpath)
{
var navSync = $('#nav-sync');
if (navSync.hasClass('sync')) {
navSync.removeClass('sync');
showSyncOff(navSync,relpath);
storeLink(stripPath2($(location).attr('pathname'))+$(location).attr('hash'));
} else {
navSync.addClass('sync');
showSyncOn(navSync,relpath);
deleteLink();
}
}
function initNavTree(toroot,relpath)
{
var o = new Object();
o.toroot = toroot;
o.node = new Object();
o.node.li = document.getElementById("nav-tree-contents");
o.node.childrenData = NAVTREE;
o.node.children = new Array();
o.node.childrenUL = document.createElement("ul");
o.node.getChildrenUL = function() { return o.node.childrenUL; };
o.node.li.appendChild(o.node.childrenUL);
o.node.depth = 0;
o.node.relpath = relpath;
o.node.expanded = false;
o.node.isLast = true;
o.node.plus_img = document.createElement("img");
o.node.plus_img.src = relpath+"ftv2pnode.png";
o.node.plus_img.width = 16;
o.node.plus_img.height = 22;
if (localStorageSupported()) {
var navSync = $('#nav-sync');
if (cachedLink()) {
showSyncOff(navSync,relpath);
navSync.removeClass('sync');
} else {
showSyncOn(navSync,relpath);
}
navSync.click(function(){ toggleSyncButton(relpath); });
}
navTo(o,toroot,window.location.hash,relpath);
$(window).bind('hashchange', function(){
if (window.location.hash && window.location.hash.length>1){
var a;
if ($(location).attr('hash')){
var clslink=stripPath($(location).attr('pathname'))+':'+
$(location).attr('hash').substring(1);
a=$('.item a[class$="'+clslink+'"]');
}
if (a==null || !$(a).parent().parent().hasClass('selected')){
$('.item').removeClass('selected');
$('.item').removeAttr('id');
}
var link=stripPath2($(location).attr('pathname'));
navTo(o,link,$(location).attr('hash'),relpath);
} else if (!animationInProgress) {
$('#doc-content').scrollTop(0);
$('.item').removeClass('selected');
$('.item').removeAttr('id');
navTo(o,toroot,window.location.hash,relpath);
}
})
$(window).load(showRoot);
}<|fim▁end|> | expandNode(o, n, true, true);
} |
<|file_name|>CoarseFundamentalTop3Algorithm.py<|end_file_name|><|fim▁begin|># QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from clr import AddReference
AddReference("System.Core")
AddReference("QuantConnect.Common")
AddReference("QuantConnect.Algorithm")
from System import *
from QuantConnect import *
from QuantConnect.Algorithm import QCAlgorithm
from QuantConnect.Data.UniverseSelection import *
### <summary>
### Demonstration of using coarse universe selection to filter down to a smaller universe of stocks.
### </summary>
### <meta name="tag" content="using data" />
### <meta name="tag" content="universes" />
### <meta name="tag" content="coarse universes" />
### <meta name="tag" content="fine universes" />
class CoarseFundamentalTop3Algorithm(QCAlgorithm):
def Initialize(self):
'''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
self.SetStartDate(2014,3,24) #Set Start Date
self.SetEndDate(2014,4,7) #Set End Date
self.SetCash(50000) #Set Strategy Cash
# what resolution should the data *added* to the universe be?
self.UniverseSettings.Resolution = Resolution.Daily
# this add universe method accepts a single parameter that is a function that
# accepts an IEnumerable<CoarseFundamental> and returns IEnumerable<Symbol>
self.AddUniverse(self.CoarseSelectionFunction)
self.__numberOfSymbols = 3
self._changes = None
# sort the data by daily dollar volume and take the top 'NumberOfSymbols'
def CoarseSelectionFunction(self, coarse):
# sort descending by daily dollar volume
sortedByDollarVolume = sorted(coarse, key=lambda x: x.DollarVolume, reverse=True)
# return the symbol objects of the top entries from our sorted collection
return [ x.Symbol for x in sortedByDollarVolume[:self.__numberOfSymbols] ]
def OnData(self, data):
self.Log(f"OnData({self.UtcTime}): Keys: {', '.join([key.Value for key in data.Keys])}")
# if we have no changes, do nothing
if self._changes is None: return
# liquidate removed securities
for security in self._changes.RemovedSecurities:<|fim▁hole|> self.Liquidate(security.Symbol)
# we want 1/N allocation in each security in our universe
for security in self._changes.AddedSecurities:
self.SetHoldings(security.Symbol, 1 / self.__numberOfSymbols)
self._changes = None
# this event fires whenever we have changes to our universe
def OnSecuritiesChanged(self, changes):
self._changes = changes
self.Log(f"OnSecuritiesChanged({self.UtcTime}):: {changes}")
def OnOrderEvent(self, fill):
self.Log(f"OnOrderEvent({self.UtcTime}):: {fill}")<|fim▁end|> | if security.Invested: |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use crate::{config::WorkMode, env::Env, file_saver::*};
use std::path::Path;
mod alias;
mod child_properties;
mod constants;
mod doc;
mod enums;
mod flags;
pub mod function;
mod function_body_chunk;
mod functions;
mod general;
mod object;
mod objects;<|fim▁hole|>mod parameter;
mod properties;
mod property_body;
mod record;
mod records;
mod return_value;
mod signal;
mod signal_body;
mod sys;
mod trait_impls;
mod trampoline;
mod trampoline_from_glib;
mod trampoline_to_glib;
pub mod translate_from_glib;
pub mod translate_to_glib;
pub fn generate(env: &Env) {
match env.config.work_mode {
WorkMode::Normal => normal_generate(env),
WorkMode::Sys => sys::generate(env),
WorkMode::Doc => doc::generate(env),
WorkMode::DisplayNotBound => {}
}
}
fn normal_generate(env: &Env) {
let mut mod_rs: Vec<String> = Vec::new();
let mut traits: Vec<String> = Vec::new();
let root_path = env.config.auto_path.as_path();
generate_single_version_file(env);
objects::generate(env, root_path, &mut mod_rs, &mut traits);
records::generate(env, root_path, &mut mod_rs);
enums::generate(env, root_path, &mut mod_rs);
flags::generate(env, root_path, &mut mod_rs);
alias::generate(env, root_path, &mut mod_rs);
functions::generate(env, root_path, &mut mod_rs);
constants::generate(env, root_path, &mut mod_rs);
generate_mod_rs(env, root_path, &mod_rs, &traits);
}
pub fn generate_mod_rs(env: &Env, root_path: &Path, mod_rs: &[String], traits: &[String]) {
let path = root_path.join("mod.rs");
save_to_file(path, env.config.make_backup, |w| {
general::start_comments(w, &env.config)?;
general::write_vec(w, mod_rs)?;
writeln!(w)?;
writeln!(w, "#[doc(hidden)]")?;
writeln!(w, "pub mod traits {{")?;
general::write_vec(w, traits)?;
writeln!(w, "}}")
});
}
pub fn generate_single_version_file(env: &Env) {
if let Some(ref path) = env.config.single_version_file {
save_to_file(path, env.config.make_backup, |w| {
general::single_version_file(w, &env.config)
});
}
}<|fim▁end|> | |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# CoderDojo Twin Cities Python for Minecraft documentation build configuration file, created by
# sphinx-quickstart on Fri Oct 24 00:52:04 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.todo']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General information about the project.<|fim▁hole|>copyright = u'by multiple <a href="https://github.com/CoderDojoTC/python-minecraft/graphs/contributors">contributors</a>'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'CoderDojoTwinCitiesPythonforMinecraftdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'CoderDojoTwinCitiesPythonforMinecraft.tex', u'CoderDojo Twin Cities Python for Minecraft Documentation',
u'Mike McCallister', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'coderdojotwincitiespythonforminecraft', u'CoderDojo Twin Cities Python for Minecraft Documentation',
[u'Mike McCallister'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'CoderDojoTwinCitiesPythonforMinecraft', u'CoderDojo Twin Cities Python for Minecraft Documentation',
u'Mike McCallister', 'CoderDojoTwinCitiesPythonforMinecraft', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False<|fim▁end|> | project = u'CoderDojo Twin Cities Python for Minecraft' |
<|file_name|>publicationfolder.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from ._base import IIngestableFolder, Ingestor, IngestableFolderView
from .interfaces import IPublication
from five import grok
class IPublicationFolder(IIngestableFolder):
u'''Folder containing publications.'''
class PublicationIngestor(Ingestor):
u'''RDF ingestor for publication.'''
grok.context(IPublicationFolder)
def getContainedObjectInterface(self):
return IPublication
class View(IngestableFolderView):
u'''View for a publication folder'''
grok.context(IPublicationFolder)<|fim▁end|> | # encoding: utf-8
u'''MCL — Publication Folder'''
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># AsteriskLint -- an Asterisk PBX config syntax checker
# Copyright (C) 2015-2016 Walter Doekes, OSSO B.V.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.<|fim▁hole|>from .func_odbc import FuncOdbcAggregator
class FileConfigParser(ConfigAggregator, FileReader):
pass
class FileDialplanParser(DialplanAggregator, FileReader):
pass
class FileFuncOdbcParser(FuncOdbcAggregator, FileReader):
pass<|fim▁end|> | from .config import ConfigAggregator
from .dialplan import DialplanAggregator
from .file import FileReader |
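Each of the three parser classes above is a pure mixin composition: an aggregator paired with `FileReader`, with an empty body. A toy sketch of the same pattern with stand-in classes (the names are invented; only the composition technique matches):

```python
class Reader:                          # plays the role of FileReader
    def read(self):
        return "raw data"

class Aggregator:                      # plays the role of ConfigAggregator
    def parse(self):
        return "parsed: " + self.read()  # relies on a Reader being mixed in

class FileParser(Aggregator, Reader):  # composition by inheritance, no body
    pass

print(FileParser().parse())  # -> parsed: raw data
```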
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>//! Validate checks some basic safety conditions.
use super::{NormError, NormResult};
use super::norm_util::{self, Symbols};
use grammar::consts::*;
use grammar::parse_tree::*;
use grammar::repr as r;
use string_cache::DefaultAtom as Atom;
use collections::{set, Multimap};
use util::Sep;
#[cfg(test)]
mod test;
pub fn validate(grammar: &Grammar) -> NormResult<()> {
let match_token: Option<&MatchToken> = grammar
.items
.iter()
.filter_map(|item| item.as_match_token())
.next();
let extern_token: Option<&ExternToken> = grammar
.items
.iter()
.filter_map(|item| item.as_extern_token())
.next();
let validator = Validator {
grammar: grammar,
match_token: match_token,
extern_token: extern_token,
};
validator.validate()
}
struct Validator<'grammar> {
grammar: &'grammar Grammar,
match_token: Option<&'grammar MatchToken>,
extern_token: Option<&'grammar ExternToken>,
}
impl<'grammar> Validator<'grammar> {
fn validate(&self) -> NormResult<()> {
let allowed_names = vec![
Atom::from(LALR),
Atom::from(TABLE_DRIVEN),
Atom::from(RECURSIVE_ASCENT),
Atom::from(TEST_ALL),
];
for annotation in &self.grammar.annotations {
if !allowed_names.contains(&annotation.id) {
return_err!(
annotation.id_span,
"unrecognized annotation `{}`",
annotation.id
);
}
}
for item in &self.grammar.items {
match *item {
GrammarItem::Use(..) => {}
GrammarItem::MatchToken(ref data) => {
if data.span != self.match_token.unwrap().span {
return_err!(data.span, "multiple match definitions are not permitted");
}
// Only error if a custom lexer is specified, having a custom types is ok
if let Some(d) = self.extern_token {
if d.enum_token.is_some() {
return_err!(
d.span,
"extern (with custom tokens) and match definitions are mutually exclusive");
}
}
// Ensure that the catch all is final item of final block
for (contents_idx, match_contents) in data.contents.iter().enumerate() {
for (item_idx, item) in match_contents.items.iter().enumerate() {
if item.is_catch_all()
&& (contents_idx != &data.contents.len() - 1
|| item_idx != &match_contents.items.len() - 1)
{
return_err!(item.span(), "Catch all must be final item");
} else {
println!("ok");
}
}
}
}
GrammarItem::ExternToken(ref data) => {
if data.span != self.extern_token.unwrap().span {
return_err!(data.span, "multiple extern definitions are not permitted");
}
// Only error if a custom lexer is specified, having a custom types is ok
if let Some(d) = self.match_token {
if data.enum_token.is_some() {
return_err!(
d.span,
"match and extern (with custom tokens) definitions are mutually exclusive");
}
}
let allowed_names = vec![Atom::from(LOCATION), Atom::from(ERROR)];
let mut new_names = set();
for associated_type in &data.associated_types {
if !allowed_names.contains(&associated_type.type_name) {
return_err!(
associated_type.type_span,
"associated type `{}` not recognized, \
try one of the following: {}",
associated_type.type_name,
Sep(", ", &allowed_names)
);
} else if !new_names.insert(associated_type.type_name.clone()) {
return_err!(
associated_type.type_span,
"associated type `{}` already specified",
associated_type.type_name
);
}
}
}
GrammarItem::Nonterminal(ref data) => {
if data.visibility.is_pub() && !data.args.is_empty() {
return_err!(data.span, "macros cannot be marked public");
}
let inline_annotation = Atom::from(INLINE);
let known_annotations = vec![inline_annotation.clone()];
let mut found_annotations = set();
for annotation in &data.annotations {
if !known_annotations.contains(&annotation.id) {
return_err!(
annotation.id_span,
"unrecognized annotation `{}`",
annotation.id
);
} else if !found_annotations.insert(annotation.id.clone()) {
return_err!(
annotation.id_span,
"duplicate annotation `{}`",
annotation.id
);
} else if annotation.id == inline_annotation && data.visibility.is_pub() {
return_err!(
annotation.id_span,
"public items cannot be marked #[inline]"
);
}
}
for alternative in &data.alternatives {
try!(self.validate_alternative(alternative));
}
}
GrammarItem::InternToken(..) => {}<|fim▁hole|> }
Ok(())
}
fn validate_alternative(&self, alternative: &Alternative) -> NormResult<()> {
try!(self.validate_expr(&alternative.expr));
match norm_util::analyze_expr(&alternative.expr) {
Symbols::Named(syms) => {
if alternative.action.is_none() {
let sym = syms.iter().map(|&(_, _, sym)| sym).next().unwrap();
return_err!(
sym.span,
"named symbols (like `{}`) require a custom action",
sym
);
}
}
Symbols::Anon(_) => {
let empty_string = "".to_string();
let action = {
match alternative.action {
Some(ActionKind::User(ref action)) => action,
Some(ActionKind::Fallible(ref action)) => action,
_ => &empty_string,
}
};
if norm_util::check_between_braces(action).is_in_curly_brackets() {
return_err!(
alternative.span,
"Using `<>` between curly braces (e.g., `{{<>}}`) only works when your parsed values have been given names (e.g., `<x:Foo>`, not just `<Foo>`)");
}
}
}
Ok(())
}
fn validate_expr(&self, expr: &ExprSymbol) -> NormResult<()> {
for symbol in &expr.symbols {
try!(self.validate_symbol(symbol));
}
let chosen: Vec<&Symbol> = expr.symbols
.iter()
.filter(|sym| match sym.kind {
SymbolKind::Choose(_) => true,
_ => false,
})
.collect();
let named: Multimap<Atom, Vec<&Symbol>> = expr.symbols
.iter()
.filter_map(|sym| match sym.kind {
SymbolKind::Name(ref nt, _) => Some((nt.clone(), sym)),
_ => None,
})
.collect();
if !chosen.is_empty() && !named.is_empty() {
return_err!(
chosen[0].span,
"anonymous symbols like this one cannot be combined with \
named symbols like `{}`",
named.into_iter().next().unwrap().1[0]
);
}
for (name, syms) in named.into_iter() {
if syms.len() > 1 {
return_err!(
syms[1].span,
"multiple symbols named `{}` are not permitted",
name
);
}
}
Ok(())
}
fn validate_symbol(&self, symbol: &Symbol) -> NormResult<()> {
match symbol.kind {
SymbolKind::Expr(ref expr) => {
try!(self.validate_expr(expr));
}
SymbolKind::AmbiguousId(_) => { /* see resolve */ }
SymbolKind::Terminal(_) => { /* see postvalidate! */ }
SymbolKind::Nonterminal(_) => { /* see resolve */ }
SymbolKind::Error => {
let mut algorithm = r::Algorithm::default();
read_algorithm(&self.grammar.annotations, &mut algorithm);
if algorithm.codegen == r::LrCodeGeneration::RecursiveAscent {
return_err!(
symbol.span,
"error recovery is not yet supported by recursive ascent parsers"
);
}
}
SymbolKind::Macro(ref msym) => {
debug_assert!(msym.args.len() > 0);
for arg in &msym.args {
try!(self.validate_symbol(arg));
}
}
SymbolKind::Repeat(ref repeat) => {
try!(self.validate_symbol(&repeat.symbol));
}
SymbolKind::Choose(ref sym) | SymbolKind::Name(_, ref sym) => {
try!(self.validate_symbol(sym));
}
SymbolKind::Lookahead | SymbolKind::Lookbehind => {
// if using an internal tokenizer, lookahead/lookbehind are ok.
if let Some(extern_token) = self.extern_token {
if extern_token.enum_token.is_some() {
// otherwise, the Location type must be specified.
let loc = Atom::from(LOCATION);
if self.extern_token.unwrap().associated_type(loc).is_none() {
return_err!(
symbol.span,
"lookahead/lookbehind require you to declare the type of \
a location; add a `type {} = ..` statement to the extern token \
block",
LOCATION
);
}
}
}
}
}
Ok(())
}
}<|fim▁end|> | } |
<|file_name|>mnist_model.py<|end_file_name|><|fim▁begin|>"""
CNN on MNIST data using the fluid API of PaddlePaddle
"""
import paddle
import paddle.fluid as fluid
def mnist_cnn_model(img):
"""
Mnist cnn model
Args:
img(Variable): the input image to be recognized
Returns:
Variable: the label prediction
"""
conv_pool_1 = fluid.nets.simple_img_conv_pool(
input=img,
num_filters=20,
filter_size=5,
pool_size=2,
pool_stride=2,
act='relu')
conv_pool_2 = fluid.nets.simple_img_conv_pool(
input=conv_pool_1,
num_filters=50,
filter_size=5,
pool_size=2,
pool_stride=2,
act='relu')
fc = fluid.layers.fc(input=conv_pool_2, size=50, act='relu')
logits = fluid.layers.fc(input=fc, size=10, act='softmax')
return logits
def main():
"""
Train the cnn model on mnist datasets
"""
img = fluid.layers.data(name='img', shape=[1, 28, 28], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
logits = mnist_cnn_model(img)
cost = fluid.layers.cross_entropy(input=logits, label=label)
avg_cost = fluid.layers.mean(x=cost)
optimizer = fluid.optimizer.Adam(learning_rate=0.01)
optimizer.minimize(avg_cost)
batch_size = fluid.layers.create_tensor(dtype='int64')
batch_acc = fluid.layers.accuracy(
input=logits, label=label, total=batch_size)
BATCH_SIZE = 50
PASS_NUM = 3
ACC_THRESHOLD = 0.98
LOSS_THRESHOLD = 10.0
train_reader = paddle.batch(
paddle.reader.shuffle(
paddle.dataset.mnist.train(), buf_size=500),
batch_size=BATCH_SIZE)
# use CPU
place = fluid.CPUPlace()
# use GPU
# place = fluid.CUDAPlace(0)
exe = fluid.Executor(place)
feeder = fluid.DataFeeder(feed_list=[img, label], place=place)
exe.run(fluid.default_startup_program())<|fim▁hole|> pass_acc = fluid.average.WeightedAverage()
for pass_id in range(PASS_NUM):
pass_acc.reset()
for data in train_reader():
loss, acc, b_size = exe.run(
fluid.default_main_program(),
feed=feeder.feed(data),
fetch_list=[avg_cost, batch_acc, batch_size])
pass_acc.add(value=acc, weight=b_size)
pass_acc_val = pass_acc.eval()[0]
print("pass_id=" + str(pass_id) + " acc=" + str(acc[0]) +
" pass_acc=" + str(pass_acc_val))
if loss < LOSS_THRESHOLD and pass_acc_val > ACC_THRESHOLD:
# early stop
break
print("pass_id=" + str(pass_id) + " pass_acc=" + str(pass_acc.eval()[
0]))
fluid.io.save_params(
exe, dirname='./mnist', main_program=fluid.default_main_program())
print('train mnist done')
if __name__ == '__main__':
main()<|fim▁end|> | |
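The spatial sizes in the model above can be verified by hand: a 5x5 "valid" convolution shrinks each side by 4, and a 2x2 stride-2 pool halves it, so 28 -> 24 -> 12 -> 8 -> 4, leaving 50 x 4 x 4 = 800 features at the fully connected layer. A pure-Python check of that arithmetic (no Paddle required):

```python
def conv_out(size, kernel):        # valid convolution, stride 1
    return size - kernel + 1

def pool_out(size, pool, stride):  # non-overlapping pooling
    return (size - pool) // stride + 1

s = 28                             # MNIST images are 28x28
s = pool_out(conv_out(s, 5), 2, 2) # first conv-pool block -> 12
s = pool_out(conv_out(s, 5), 2, 2) # second conv-pool block -> 4
print(s, 50 * s * s)               # 4 800 -- features feeding the fc layer
```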
<|file_name|>parser_ips.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sys
from intelmq.lib import utils
from intelmq.lib.bot import Bot
from intelmq.lib.message import Event
class MalwareGroupIPsParserBot(Bot):
def process(self):
report = self.receive_message()
if not report:
self.acknowledge_message()
return
if not report.contains("raw"):
self.acknowledge_message()
raw_report = utils.base64_decode(report.value("raw"))
raw_report = raw_report.split("<tbody>")[1]
raw_report = raw_report.split("</tbody>")[0]
raw_report_splitted = raw_report.split("<tr>")
for row in raw_report_splitted:
row = row.strip()
if row == "":
continue
row_splitted = row.split("<td>")<|fim▁hole|> ip = row_splitted[1].split('">')[1].split("<")[0].strip()
time_source = row_splitted[6].replace("</td></tr>", "").strip()
time_source = time_source + " 00:00:00 UTC"
event = Event(report)
event.add('time.source', time_source, sanitize=True)
event.add('classification.type', u'malware')
event.add('source.ip', ip, sanitize=True)
event.add('raw', row, sanitize=True)
self.send_message(event)
self.acknowledge_message()
if __name__ == "__main__":
bot = MalwareGroupIPsParserBot(sys.argv[1])
bot.start()<|fim▁end|> | |
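The parser above hard-codes assumptions about the page markup: after splitting on `<tr>`, each row must start with a `<td>` whose link wraps the IP, with the date in the sixth cell. A sketch of a row the split chain would accept (the sample markup is invented to match the code, not taken from the real feed):

```python
# A row shaped the way MalwareGroupIPsParserBot expects (illustrative only).
row = ('<td><a href="#">198.51.100.7</a></td><td>x</td><td>x</td>'
       '<td>x</td><td>x</td><td>2016-01-01</td></tr>')
row_splitted = row.split("<td>")
ip = row_splitted[1].split('">')[1].split("<")[0].strip()
time_source = row_splitted[6].replace("</td></tr>", "").strip() + " 00:00:00 UTC"
print(ip, "|", time_source)  # -> 198.51.100.7 | 2016-01-01 00:00:00 UTC
```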
<|file_name|>Applicant.java<|end_file_name|><|fim▁begin|>package org.glamey.training.designmodel.responsibility_chain;
import lombok.Builder;
import lombok.Getter;
import java.math.BigDecimal;<|fim▁hole|>
/**
* @author zhouyang.zhou. 2017.08.14.16.
*/
@Getter
@Builder
public class Applicant {
private String name;
private String subject;
private BigDecimal money;
}<|fim▁end|> | |
<|file_name|>cookie.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, unicode_literals
from django.utils.encoding import force_str
from django.utils import six
from django.utils.six.moves import http_cookies
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = http_cookies.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
_tc = http_cookies.SimpleCookie()
try:
_tc.load(str('foo:bar=1'))
_cookie_allows_colon_in_names = True
except http_cookies.CookieError:
_cookie_allows_colon_in_names = False
if _cookie_encodes_correctly and _cookie_allows_colon_in_names:
SimpleCookie = http_cookies.SimpleCookie
else:
Morsel = http_cookies.Morsel
class SimpleCookie(http_cookies.SimpleCookie):
if not _cookie_encodes_correctly:
def value_encode(self, val):
# Some browsers do not support quoted-string from RFC 2109,
# including some versions of Safari and Internet Explorer.
# These browsers split on ';', and some versions of Safari
# are known to split on ', '. Therefore, we encode ';' and ','
# SimpleCookie already does the hard work of encoding and decoding.
# It uses octal sequences like '\\012' for newline etc.
# and non-ASCII chars. We just make use of this mechanism, to
# avoid introducing two encoding schemes which would be confusing
# and especially awkward for javascript.
# NB, contrary to Python docs, value_encode returns a tuple containing
# (real val, encoded_val)
val, encoded = super(SimpleCookie, self).value_encode(val)
encoded = encoded.replace(";", "\\073").replace(",","\\054")
# If encoded now contains any quoted chars, we need double quotes
# around the whole string.
if "\\" in encoded and not encoded.startswith('"'):
encoded = '"' + encoded + '"'
return val, encoded
if not _cookie_allows_colon_in_names:
def load(self, rawdata):
self.bad_cookies = set()
if six.PY2 and isinstance(rawdata, six.text_type):
rawdata = force_str(rawdata)
super(SimpleCookie, self).load(rawdata)
for key in self.bad_cookies:
del self[key]
# override private __set() method:
# (needed for using our Morsel, and for laxness with CookieError
def _BaseCookie__set(self, key, real_value, coded_value):
key = force_str(key)
try:
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
except http_cookies.CookieError:
self.bad_cookies.add(key)
dict.__setitem__(self, key, http_cookies.Morsel())
def parse_cookie(cookie):
if cookie == '':
return {}
if not isinstance(cookie, http_cookies.BaseCookie):
try:
c = SimpleCookie()<|fim▁hole|> # Invalid cookie
return {}
else:
c = cookie
cookiedict = {}
for key in c.keys():
cookiedict[key] = c.get(key).value
return cookiedict<|fim▁end|> | c.load(cookie)
except http_cookies.CookieError: |
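A quick usage sketch of the `parse_cookie` helper defined above: it reduces a raw `Cookie:` header value to a plain dict, and returns `{}` for empty input:

```python
# Hypothetical inputs; parse_cookie is the function defined above.
print(parse_cookie('sessionid=abc123; csrftoken=xyz'))
# -> {'sessionid': 'abc123', 'csrftoken': 'xyz'}
print(parse_cookie(''))  # -> {}
```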
<|file_name|>ApplicationActionBarAdvisor.java<|end_file_name|><|fim▁begin|>package fr.obeo.dsl.minidrone.application;
import org.eclipse.jface.action.IMenuManager;
import org.eclipse.ui.IWorkbenchWindow;
import org.eclipse.ui.application.ActionBarAdvisor;
import org.eclipse.ui.application.IActionBarConfigurer;
public class ApplicationActionBarAdvisor extends ActionBarAdvisor {
public ApplicationActionBarAdvisor(IActionBarConfigurer configurer) {
super(configurer);
}
protected void makeActions(IWorkbenchWindow window) {
}<|fim▁hole|>}<|fim▁end|> |
protected void fillMenuBar(IMenuManager menuBar) {
}
|
<|file_name|>iomgr_windows.cc<|end_file_name|><|fim▁begin|>/*
*
* Copyright 2015 gRPC authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <grpc/support/port_platform.h>
#include "src/core/lib/iomgr/port.h"
#ifdef GRPC_WINSOCK_SOCKET
#include "src/core/lib/iomgr/sockaddr_windows.h"
#include <grpc/support/log.h>
#include "src/core/lib/iomgr/iocp_windows.h"
#include "src/core/lib/iomgr/iomgr.h"
#include "src/core/lib/iomgr/pollset_windows.h"
#include "src/core/lib/iomgr/resolve_address.h"
#include "src/core/lib/iomgr/socket_windows.h"
#include "src/core/lib/iomgr/tcp_client.h"
#include "src/core/lib/iomgr/tcp_server.h"
#include "src/core/lib/iomgr/timer.h"
extern grpc_tcp_server_vtable grpc_windows_tcp_server_vtable;
extern grpc_tcp_client_vtable grpc_windows_tcp_client_vtable;
extern grpc_timer_vtable grpc_generic_timer_vtable;
extern grpc_pollset_vtable grpc_windows_pollset_vtable;
extern grpc_pollset_set_vtable grpc_windows_pollset_set_vtable;
extern grpc_address_resolver_vtable grpc_windows_resolver_vtable;
/* Windows' io manager is going to be fully designed using IO completion
ports. All of what we're doing here is basically make sure that
Windows sockets are initialized in and out. */
static void winsock_init(void) {
WSADATA wsaData;
int status = WSAStartup(MAKEWORD(2, 0), &wsaData);
GPR_ASSERT(status == 0);
}
static void winsock_shutdown(void) {
int status = WSACleanup();
GPR_ASSERT(status == 0);
}
static void iomgr_platform_init(void) {
winsock_init();
grpc_iocp_init();
grpc_pollset_global_init();
}
static void iomgr_platform_flush(void) { grpc_iocp_flush(); }
static void iomgr_platform_shutdown(void) {
grpc_pollset_global_shutdown();
grpc_iocp_shutdown();
winsock_shutdown();
}
static void iomgr_platform_shutdown_background_closure(void) {}
static bool iomgr_platform_is_any_background_poller_thread(void) {
return false;
}
static grpc_iomgr_platform_vtable vtable = {
iomgr_platform_init, iomgr_platform_flush, iomgr_platform_shutdown,
iomgr_platform_shutdown_background_closure,
iomgr_platform_is_any_background_poller_thread};
<|fim▁hole|>void grpc_set_default_iomgr_platform() {
grpc_set_tcp_client_impl(&grpc_windows_tcp_client_vtable);
grpc_set_tcp_server_impl(&grpc_windows_tcp_server_vtable);
grpc_set_timer_impl(&grpc_generic_timer_vtable);
grpc_set_pollset_vtable(&grpc_windows_pollset_vtable);
grpc_set_pollset_set_vtable(&grpc_windows_pollset_set_vtable);
grpc_set_resolver_impl(&grpc_windows_resolver_vtable);
grpc_set_iomgr_platform_vtable(&vtable);
}
bool grpc_iomgr_run_in_background() { return false; }
#endif /* GRPC_WINSOCK_SOCKET */<|fim▁end|> | |
<|file_name|>esempio0_json.py<|end_file_name|><|fim▁begin|>import json
import sys
import urllib
keyword = sys.argv[1]
def getTimes(query,num):
"Questa funzione fa una ricerca su NY Times"
url = "http://query.nytimes.com/svc/cse/v2/sitesearch.json?query="+query.replace(" ","%20")+"&pt=article&page="+str(num)
jtext = urllib.urlopen(url)
return jtext
def search(term):
page_number = 0
meta = 1
while meta > 0 and page_number<1:
gt = getTimes(term,page_number)
resp = json.load(gt)
meta = int(resp['results']['meta']['payload'])
for res in resp['results']['results']:
print res['snippet']
headline = res['hdl']
# snippet = res['snippet']
# author = res['cre']
url = res['url']<|fim▁hole|> print url
page_number+=1
search(keyword)<|fim▁end|> | print headline.encode('utf-8') |
<|file_name|>password-generator.js<|end_file_name|><|fim▁begin|>// passwort-generator.js
// http://passwort-generieren.de
// (c) 2014 Jan Krause
(function() {
"use strict";
var root = this;
var PasswordGenerator = function(options) {
if(!options){
options = {};
options.el = document.body;
}
this.options = this.extend(options, this.default_options);
};
// Export the object for **Node.js**
if (typeof exports !== 'undefined') {
if (typeof module !== 'undefined' && module.exports) {<|fim▁hole|> }
exports.PasswordGenerator = PasswordGenerator;
} else {
root.PasswordGenerator = PasswordGenerator;
}
PasswordGenerator.prototype = {
options: {},
default_options: {
length: 11,
lowercase: true,
uppercase: true,
numbers: true,
special_character: true,
brackets: true,
minus: true,
underscore: true,
space: true
},
_passwort: '',
extend: function(options,defaults){
var extended = {};
var prop;
for (prop in defaults) {
if (Object.prototype.hasOwnProperty.call(defaults, prop)) {
extended[prop] = defaults[prop];
}
}
for (prop in options) {
if (Object.prototype.hasOwnProperty.call(options, prop)) {
extended[prop] = options[prop];
}
}
return extended;
},
generate: function() {
var _i, _len, _passwortString = '';
if(this.options.lowercase){
_passwortString += 'abcdefghijklmnopqrstuvwxyz';
}
if(this.options.uppercase){
_passwortString += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ';
}
if(this.options.numbers){
_passwortString += '0123456789';
}
if(this.options.special_character){
_passwortString += ',.;:#+~*=&%$§!|/€@"^°`´\'\\';
}
if(this.options.brackets){
_passwortString += '<>[](){}';
}
if(this.options.minus){
_passwortString += '-';
}
if(this.options.underscore){
_passwortString += '_';
}
if(this.options.space){
_passwortString += ' ';
}
this._passwort = '';
for (_i = 0, _len = this.options.length; _i < _len; _i++) {
this._passwort += _passwortString.charAt(Math.floor(Math.random() * _passwortString.length));
}
},
set: function(param) {
this.options = this.extend(param,this.options);
},
get: function() {
this.generate();
return this._passwort;
},
render: function() {
this.options.el.innerHTML = this.get();
}
};
}.call(this));<|fim▁end|> | exports = module.exports = PasswordGenerator; |
<|file_name|>CanvasAnimation.js<|end_file_name|><|fim▁begin|>var __extends = (this && this.__extends) || function (d, b) {
for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p];
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
function __export(m) {
for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
var Parameter = require("../src/Parameter");
var OT = require("./FRP");
var glow = require("./glow");
__export(require("./types"));
var DEBUG = false;
/**
* Each frame an animation is provided a CanvasTick. The tick exposes the local animation time, the
* time delta since the previous frame (dt), and the drawing context. Animators typically use the drawing context
* directly, and pass the clock on to any time-varying parameters.
*/
var CanvasTick = (function (_super) {
__extends(CanvasTick, _super);
function CanvasTick(clock, dt, ctx, events, previous) {
_super.call(this, clock, dt, previous);
this.clock = clock;
this.dt = dt;
this.ctx = ctx;
this.events = events;
this.previous = previous;
}
CanvasTick.prototype.copy = function () {
return new CanvasTick(this.clock, this.dt, this.ctx, this.events, this.previous);
};
CanvasTick.prototype.save = function () {
var cp = _super.prototype.save.call(this);
cp.ctx.save();
return cp;
};
CanvasTick.prototype.restore = function () {
var cp = _super.prototype.restore.call(this);
cp.ctx.restore();
return cp;
};
return CanvasTick;
})(OT.BaseTick);
exports.CanvasTick = CanvasTick;
var Animation = (function (_super) {
__extends(Animation, _super);
function Animation(attach) {
_super.call(this, attach);
this.attach = attach;
}
/**
* Subclasses should override this to create another animation of the same type.
* @param attach
*/
Animation.prototype.create = function (attach) {
if (attach === void 0) { attach = function (nop) { return nop; }; }
return new Animation(attach);
};
/**
* Affect this with an effect to create a combined animation.
* Debug messages are inserted around the effect (e.g. a mutation to the canvas).
* You can expose time-varying or constant parameters to the inner effect using the optional params.
*/
Animation.prototype.loggedAffect = function (label, effectBuilder, param1, param2, param3, param4, param5, param6, param7, param8) {
if (DEBUG)
console.log(label + ": build");
return this.affect(function () {
if (DEBUG)
console.log(label + ": attach");
var effect = effectBuilder();
return function (tick, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8) {
if (DEBUG) {
var elements = [];
if (arg1)
elements.push(arg1 + "");
if (arg2)
elements.push(arg2 + "");
if (arg3)
elements.push(arg3 + "");
if (arg4)
elements.push(arg4 + "");<|fim▁hole|> if (arg5)
elements.push(arg5 + "");
if (arg6)
elements.push(arg6 + "");
if (arg7)
elements.push(arg7 + "");
if (arg8)
elements.push(arg8 + "");
console.log(label + ": tick (" + elements.join(",") + ")");
}
effect(tick, arg1, arg2, arg3, arg4, arg5, arg6, arg7, arg8);
};
}, (param1 ? Parameter.from(param1) : undefined), (param2 ? Parameter.from(param2) : undefined), (param3 ? Parameter.from(param3) : undefined), (param4 ? Parameter.from(param4) : undefined), (param5 ? Parameter.from(param5) : undefined), (param6 ? Parameter.from(param6) : undefined), (param7 ? Parameter.from(param7) : undefined), (param8 ? Parameter.from(param8) : undefined));
};
Animation.prototype.velocity = function (velocity) {
if (DEBUG)
console.log("velocity: build");
return this.affect(function () {
if (DEBUG)
console.log("velocity: attach");
var pos = [0.0, 0.0];
return function (tick, velocity) {
if (DEBUG)
console.log("velocity: tick", velocity, pos);
tick.ctx.transform(1, 0, 0, 1, pos[0], pos[1]);
pos[0] += velocity[0] * tick.dt;
pos[1] += velocity[1] * tick.dt;
};
}, Parameter.from(velocity));
};
Animation.prototype.tween_linear = function (from, to, time) {
return this.affect(function () {
var t = 0;
if (DEBUG)
console.log("tween: init");
return function (tick, from, to, time) {
t = t + tick.dt;
if (t > time)
t = time;
var x = from[0] + (to[0] - from[0]) * t / time;
var y = from[1] + (to[1] - from[1]) * t / time;
if (DEBUG)
console.log("tween: tick", x, y, t);
tick.ctx.transform(1, 0, 0, 1, x, y);
};
}, Parameter.from(from), Parameter.from(to), Parameter.from(time));
};
Animation.prototype.glow = function (decay) {
if (decay === void 0) { decay = 0.1; }
return glow.glow(this, decay);
};
// Canvas API
/**
* Dynamic chainable wrapper for strokeStyle in the canvas API.
*/
Animation.prototype.strokeStyle = function (color) {
return this.loggedAffect("strokeStyle", function () { return function (tick, color) {
return tick.ctx.strokeStyle = color;
}; }, color);
};
/**
* Dynamic chainable wrapper for fillStyle in the canvas API.
*/
Animation.prototype.fillStyle = function (color) {
return this.loggedAffect("fillStyle", function () { return function (tick, color) {
return tick.ctx.fillStyle = color;
}; }, color);
};
/**
* Dynamic chainable wrapper for shadowColor in the canvas API.
*/
Animation.prototype.shadowColor = function (color) {
return this.loggedAffect("shadowColor", function () { return function (tick, color) {
return tick.ctx.shadowColor = color;
}; }, color);
};
/**
* Dynamic chainable wrapper for shadowBlur in the canvas API.
*/
Animation.prototype.shadowBlur = function (level) {
return this.loggedAffect("shadowBlur", function () { return function (tick, level) {
return tick.ctx.shadowBlur = level;
}; }, level);
};
/**
* Dynamic chainable wrapper for shadowOffsetX and shadowOffsetY in the canvas API.
*/
Animation.prototype.shadowOffset = function (xy) {
return this.loggedAffect("shadowOffset", function () { return function (tick, xy) {
tick.ctx.shadowOffsetX = xy[0];
tick.ctx.shadowOffsetY = xy[1];
}; }, xy);
};
/**
* Dynamic chainable wrapper for lineCap in the canvas API.
*/
Animation.prototype.lineCap = function (style) {
return this.loggedAffect("lineCap", function () { return function (tick, arg) {
return tick.ctx.lineCap = arg;
}; }, style);
};
/**
* Dynamic chainable wrapper for lineJoin in the canvas API.
*/
Animation.prototype.lineJoin = function (style) {
return this.loggedAffect("lineJoin", function () { return function (tick, arg) {
return tick.ctx.lineJoin = arg;
}; }, style);
};
/**
* Dynamic chainable wrapper for lineWidth in the canvas API.
*/
Animation.prototype.lineWidth = function (width) {
return this.loggedAffect("lineWidth", function () { return function (tick, arg) {
return tick.ctx.lineWidth = arg;
}; }, width);
};
/**
* Dynamic chainable wrapper for miterLimit in the canvas API.
*/
Animation.prototype.miterLimit = function (limit) {
return this.loggedAffect("miterLimit", function () { return function (tick, arg) {
return tick.ctx.miterLimit = arg;
}; }, limit);
};
/**
* Dynamic chainable wrapper for rect in the canvas API.
*/
Animation.prototype.rect = function (xy, width_height) {
return this.loggedAffect("rect", function () { return function (tick, xy, width_height) {
return tick.ctx.rect(xy[0], xy[1], width_height[0], width_height[1]);
}; }, xy, width_height);
};
/**
* Dynamic chainable wrapper for fillRect in the canvas API.
*/
Animation.prototype.fillRect = function (xy, width_height) {
return this.loggedAffect("fillRect", function () { return function (tick, xy, width_height) {
return tick.ctx.fillRect(xy[0], xy[1], width_height[0], width_height[1]);
}; }, xy, width_height);
};
/**
* Dynamic chainable wrapper for strokeRect in the canvas API.
*/
Animation.prototype.strokeRect = function (xy, width_height) {
return this.loggedAffect("strokeRect", function () { return function (tick, xy, width_height) {
return tick.ctx.strokeRect(xy[0], xy[1], width_height[0], width_height[1]);
}; }, xy, width_height);
};
/**
* Dynamic chainable wrapper for clearRect in the canvas API.
*/
Animation.prototype.clearRect = function (xy, width_height) {
return this.loggedAffect("clearRect", function () { return function (tick, xy, width_height) {
return tick.ctx.clearRect(xy[0], xy[1], width_height[0], width_height[1]);
}; }, xy, width_height);
};
/**
     * Encloses the inner animation between beginPath() and closePath() calls from the canvas API.
     *
     * Returns a PathAnimation whose events can be subscribed to.
*/
Animation.prototype.withinPath = function (inner) {
return this.pipe(new PathAnimation(function (upstream) {
if (DEBUG)
console.log("withinPath: attach");
var beginPathBeforeInner = upstream.tapOnNext(function (tick) { return tick.ctx.beginPath(); });
return inner.attach(beginPathBeforeInner).tapOnNext(function (tick) { return tick.ctx.closePath(); });
}));
};
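    // Illustrative sketch: path commands composed inside withinPath share one
    // beginPath()/closePath() pair, so a typical call chain (using the exported
    // create() helper defined later in this file) might look like:
    //
    //   animation.withinPath(
    //       create().moveTo([10, 10])
    //               .lineTo([90, 10])
    //               .lineTo([90, 90])
    //   ).fillStyle("red").fill();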
/**
     * Dynamic chainable wrapper for closePath in the canvas API.
*/
Animation.prototype.closePath = function () {
return this.loggedAffect("closePath", function () { return function (tick) {
return tick.ctx.closePath();
}; });
};
/**
     * Dynamic chainable wrapper for beginPath in the canvas API.
*/
Animation.prototype.beginPath = function () {
return this.loggedAffect("beginPath", function () { return function (tick) {
return tick.ctx.beginPath();
}; });
};
/**
* Dynamic chainable wrapper for fill in the canvas API.
*/
Animation.prototype.fill = function () {
return this.loggedAffect("fill", function () { return function (tick) {
return tick.ctx.fill();
}; });
};
/**
* Dynamic chainable wrapper for stroke in the canvas API.
*/
Animation.prototype.stroke = function () {
return this.loggedAffect("stroke", function () { return function (tick) {
return tick.ctx.stroke();
}; });
};
/**
* Dynamic chainable wrapper for moveTo in the canvas API.
*/
Animation.prototype.moveTo = function (xy) {
return this.loggedAffect("moveTo", function () { return function (tick, xy) {
return tick.ctx.moveTo(xy[0], xy[1]);
}; }, xy);
};
/**
* Dynamic chainable wrapper for lineTo in the canvas API.
*/
Animation.prototype.lineTo = function (xy) {
return this.loggedAffect("lineTo", function () { return function (tick, xy) {
return tick.ctx.lineTo(xy[0], xy[1]);
}; }, xy);
};
/**
* Dynamic chainable wrapper for clip in the canvas API.
*/
Animation.prototype.clip = function () {
return this.loggedAffect("clip", function () { return function (tick) {
return tick.ctx.clip();
}; });
};
/**
* Dynamic chainable wrapper for quadraticCurveTo in the canvas API. Use with withinPath.
*/
Animation.prototype.quadraticCurveTo = function (control, end) {
return this.loggedAffect("quadraticCurveTo", function () { return function (tick, arg1, arg2) {
return tick.ctx.quadraticCurveTo(arg1[0], arg1[1], arg2[0], arg2[1]);
}; }, control, end);
};
/**
* Dynamic chainable wrapper for bezierCurveTo in the canvas API. Use with withinPath.
*/
Animation.prototype.bezierCurveTo = function (control1, control2, end) {
return this.loggedAffect("bezierCurveTo", function () { return function (tick, arg1, arg2, arg3) {
return tick.ctx.bezierCurveTo(arg1[0], arg1[1], arg2[0], arg2[1], arg3[0], arg3[1]);
}; }, control1, control2, end);
};
/**
     * Dynamic chainable wrapper for arcTo in the canvas API. Use with withinPath.
*/
Animation.prototype.arcTo = function (tangent1, tangent2, radius) {
return this.loggedAffect("arcTo", function () { return function (tick, arg1, arg2, arg3) {
return tick.ctx.arcTo(arg1[0], arg1[1], arg2[0], arg2[1], arg3);
}; }, tangent1, tangent2, radius);
};
/**
* Dynamic chainable wrapper for scale in the canvas API.
*/
Animation.prototype.scale = function (xy) {
return this.loggedAffect("scale", function () { return function (tick, xy) {
return tick.ctx.scale(xy[0], xy[1]);
}; }, xy);
};
/**
* Dynamic chainable wrapper for rotate in the canvas API.
*/
Animation.prototype.rotate = function (clockwiseRadians) {
return this.loggedAffect("rotate", function () { return function (tick, arg) {
return tick.ctx.rotate(arg);
}; }, clockwiseRadians);
};
/**
* Dynamic chainable wrapper for translate in the canvas API.
*/
Animation.prototype.translate = function (xy) {
return this.loggedAffect("translate", function () { return function (tick, xy) {
tick.ctx.translate(xy[0], xy[1]);
}; }, xy);
};
/**
     * Dynamic chainable wrapper for transform in the canvas API. The transform matrix is:
* [ a c e
* b d f
* 0 0 1 ]
*/
Animation.prototype.transform = function (a, b, c, d, e, f) {
return this.loggedAffect("transform", function () { return function (tick, arg1, arg2, arg3, arg4, arg5, arg6) {
return tick.ctx.transform(arg1, arg2, arg3, arg4, arg5, arg6);
}; }, a, b, c, d, e, f);
};
/**
* Dynamic chainable wrapper for setTransform in the canvas API.
*/
Animation.prototype.setTransform = function (a, b, c, d, e, f) {
return this.loggedAffect("setTransform", function () { return function (tick, arg1, arg2, arg3, arg4, arg5, arg6) {
return tick.ctx.setTransform(arg1, arg2, arg3, arg4, arg5, arg6);
}; }, a, b, c, d, e, f);
};
/**
* Dynamic chainable wrapper for font in the canvas API.
*/
Animation.prototype.font = function (style) {
return this.loggedAffect("font", function () { return function (tick, arg) {
return tick.ctx.font = arg;
}; }, style);
};
/**
* Dynamic chainable wrapper for textAlign in the canvas API.
*/
Animation.prototype.textAlign = function (style) {
return this.loggedAffect("textAlign", function () { return function (tick, arg) {
return tick.ctx.textAlign = arg;
}; }, style);
};
/**
* Dynamic chainable wrapper for textBaseline in the canvas API.
*/
Animation.prototype.textBaseline = function (style) {
return this.loggedAffect("textBaseline", function () { return function (tick, arg) {
return tick.ctx.textBaseline = arg;
}; }, style);
};
/**
     * Dynamic chainable wrapper for fillText in the canvas API.
*/
Animation.prototype.fillText = function (text, xy, maxWidth) {
if (maxWidth) {
return this.loggedAffect("fillText", function () { return function (tick, text, xy, maxWidth) {
return tick.ctx.fillText(text, xy[0], xy[1], maxWidth);
}; }, text, xy, maxWidth);
}
else {
return this.loggedAffect("fillText", function () { return function (tick, text, xy, maxWidth) {
return tick.ctx.fillText(text, xy[0], xy[1]);
}; }, text, xy);
}
};
/**
* Dynamic chainable wrapper for drawImage in the canvas API.
*/
Animation.prototype.drawImage = function (img, xy) {
return this.loggedAffect("drawImage", function () { return function (tick, img, xy) {
return tick.ctx.drawImage(img, xy[0], xy[1]);
}; }, img, xy);
};
/**
     * Dynamic chainable wrapper for globalCompositeOperation in the canvas API.
*/
Animation.prototype.globalCompositeOperation = function (operation) {
return this.loggedAffect("globalCompositeOperation", function () { return function (tick, arg) {
return tick.ctx.globalCompositeOperation = arg;
}; }, operation);
};
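    /**
     * Dynamic chainable wrapper for arc in the canvas API. Use with withinPath.
     */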
Animation.prototype.arc = function (center, radius, radStartAngle, radEndAngle, counterclockwise) {
if (counterclockwise === void 0) { counterclockwise = false; }
return this.loggedAffect("arc", function () { return function (tick, arg1, arg2, arg3, arg4, counterclockwise) {
return tick.ctx.arc(arg1[0], arg1[1], arg2, arg3, arg4, counterclockwise);
}; }, center, radius, radStartAngle, radEndAngle, counterclockwise);
};
return Animation;
})(OT.SignalPipe);
exports.Animation = Animation;
function create(attach) {
if (attach === void 0) { attach = function (x) { return x; }; }
return new Animation(attach);
}
exports.create = create;
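// Illustrative sketch: the exported helper composes with the chainable wrappers
// above; the resulting Animation still has to be attached to a tick stream by
// the host, e.g.
//
//   var blueBox = create()
//       .fillStyle("blue")
//       .fillRect([10, 10], [80, 80]);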
var PathAnimation = (function (_super) {
__extends(PathAnimation, _super);
function PathAnimation() {
_super.apply(this, arguments);
}
return PathAnimation;
})(Animation);
exports.PathAnimation = PathAnimation;
function save(width, height, path) {
var GIFEncoder = require('gifencoder');
var fs = require('fs');
var encoder = new GIFEncoder(width, height);
    // Stream the encoded GIF to disk as frames are added.
    encoder.createReadStream().pipe(fs.createWriteStream(path));
    encoder.start();
    encoder.setRepeat(10000); // number of loops
    encoder.setDelay(100);    // ms per frame
    encoder.setQuality(1);    // pixel sample interval; lower is better
return new Animation(function (upstream) {
return upstream.tap(function (tick) {
if (DEBUG)
console.log("save: wrote frame");
encoder.addFrame(tick.ctx);
}, function () { console.error("save: not saved", path); }, function () { console.log("save: saved", path); encoder.finish(); });
});
}
exports.save = save;<|fim▁end|> | |
<|file_name|>PersistenceEngine.py<|end_file_name|><|fim▁begin|>import os
import sys
import time
'''
@author: msune, omoya, CarolinaFernandez
@organization: i2CAT, OFELIA FP7
Persistence engine
Implements driver-based persistence backend selection
'''
class PersistenceEngine():
#Default Class Attributes
_defaultParser = "RegexParser"
_defaultPersistence = "Django"
#Drivers
_drivers = ["Django","RAWFile"]
    # Fill with appropriate path
PATH_TO_DRIVERS="backends"
def __init__(self):
raise Exception("Static class cannot be instanciated")
@staticmethod
def _getDriver(driverName):
print "driver name: %s" %driverName
if driverName == "Django":
PATH = PersistenceEngine.PATH_TO_DRIVERS + '.django.Django'
try:
exec('from ' + PATH + ' import Django')
return Django
except:
raise Exception(driverName + ' persistence driver not found in ' + PersistenceEngine.PATH_TO_DRIVERS)
elif driverName == "RAWFile":
PATH = PersistenceEngine.PATH_TO_DRIVERS + '.rawfile.RAWFile'
try:<|fim▁hole|> else:
raise Exception(driverName + ' not supported')
@staticmethod
def save(obj, pBackend, parser=None, **kwargs):
return PersistenceEngine._getDriver(pBackend).save(obj, parser, **kwargs)
@staticmethod
def load(tableName, pBackend, resolverMappings, parser=None, **kwargs):
return PersistenceEngine._getDriver(pBackend).load(tableName, resolverMappings, parser, **kwargs)
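    # Illustrative sketch: callers dispatch through the static API and name the
    # backend driver explicitly (rule_table and resolver_mappings are assumed
    # caller-side objects):
    #
    #   PersistenceEngine.save(rule_table, "Django")
    #   table = PersistenceEngine.load("PolicyRuleTable", "RAWFile", resolver_mappings)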
'''
Retrieves every Driver's PolicyRuleTable object for a given name.
This method should be seldom used.
'''
@staticmethod
def loadAll(tableName, pBackend):
return PersistenceEngine._getDriver(pBackend).loadAll(tableName)
'''
Deletes a Driver's PolicyRuleTable object for a given ID.
This method should be seldom used.
'''
@staticmethod
def delete(tableID, pBackend):
return PersistenceEngine._getDriver(pBackend).delete(tableID)<|fim▁end|> | exec('from ' + PATH + ' import RAWFile')
return RAWFile
except:
raise Exception(driverName + ' persistence driver not found in ' + PersistenceEngine.PATH_TO_DRIVERS) |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var express = require('express');
var router = express.Router();
/* GET home page. */
router.get('/', function (req, res, next) {
res.render('index', { title: 'Express' });<|fim▁hole|> res.render('canvas');
});
module.exports = router;<|fim▁end|> | });
// Test route: renders the canvas demo page.
router.get('/canvas', function (req, res, next) { |
<|file_name|>amdDeclarationEmitNoExtraDeclare.ts<|end_file_name|><|fim▁begin|>// @declaration: true
// @module: amd
// @out: dist.js
// @filename: Class.ts
import { Configurable } from "./Configurable"
export class HiddenClass {}
export class ActualClass extends Configurable(HiddenClass) {}
// @filename: Configurable.ts
export type Constructor<T> = {
new(...args: any[]): T;
}
export function Configurable<T extends Constructor<{}>>(base: T): T {
return class extends base {
constructor(...args: any[]) {
super(...args);
}
<|fim▁hole|><|fim▁end|> |
};
} |
<|file_name|>tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
import time
from .models import SimpleTree, MPTTTree, TBMP, TBNS
def timeit(method):
""" Measure time of method's execution.
"""
def timed(*args, **kw):
ts = time.time()
result = method(*args, **kw)
te = time.time()
print '\n%r: %2.2f sec' % \
(method.__name__, te - ts)
return result
return timed<|fim▁hole|>CYCLES = 8
class Benchmark(object):
@timeit
def test_creation(self):
self._create_tree()
def test_delete(self):
self._create_tree(cycles=7)
@timeit
def test_deletion():
for _ in xrange(pow(2, CYCLES) / 2):
self._delete_last()
test_deletion()
def test_get(self):
self._create_tree(cycles=7)
@timeit
def test_get_tree():
root = self._get_root()
for _ in xrange(100):
self._get_tree(root)
test_get_tree()
def _create_tree(self, cycles=CYCLES):
root = self._create_root(title='root1')
nodes = [root]
        for _ in xrange(cycles):
new_nodes = []
for node in nodes:
new_nodes.append(self._create_child(parent=node))
new_nodes.append(self._create_child(parent=node))
nodes = new_nodes
return nodes
def _create_root(self, **params):
pass
def _create_child(self, parent, **params):
pass
def _delete_last(self):
pass
def _get_root(self):
pass
def _get_tree(self, parent):
pass
class SimpleTest(TestCase, Benchmark):
def setUp(self):
print "\nSimpleTree benchmark"
def _create_root(self, **params):
return SimpleTree.objects.create(**params)
def _create_child(self, parent, **params):
return SimpleTree.objects.create(parent=parent, **params)
def _delete_last(self):
SimpleTree.objects.order_by('-id')[0].delete()
def _get_root(self):
return SimpleTree.objects.get(parent=None)
def _get_tree(self, parent):
return parent.get_tree()
class MPTTTest(TestCase, Benchmark):
def setUp(self):
print "\nMPTT benchmark"
def _create_root(self, **params):
return MPTTTree.objects.create(**params)
def _create_child(self, parent, **params):
return MPTTTree.objects.create(parent=parent, **params)
def _delete_last(self):
MPTTTree.objects.order_by('-id')[0].delete()
def _get_root(self):
return MPTTTree.objects.get(parent=None)
def _get_tree(self, parent):
return list(parent.get_ancestors()) + list(parent.get_descendants(include_self=False))
class TreeBeardMP(TestCase, Benchmark):
def setUp(self):
print "\nTreebeard MP benchmark"
def _create_root(self, **params):
return TBMP.add_root(**params)
def _create_child(self, parent, **params):
return parent.add_child(**params)
def _delete_last(self):
TBMP.objects.order_by('-id')[0].delete()
def _get_root(self):
return TBMP.get_root_nodes()[0]
def _get_tree(self, parent):
TBMP.get_tree(parent=parent)
class TreeBeardNS(TreeBeardMP):
def setUp(self):
print "\nTreebeard NS benchmark"
def _create_root(self, **params):
return TBNS.add_root(**params)
def _delete_last(self):
TBNS.objects.order_by('-id')[0].delete()
def _get_root(self):
return TBNS.get_root_nodes()[0]
def _get_tree(self, parent):
TBNS.get_tree(parent=parent)<|fim▁end|> | |
<|file_name|>utilities.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#coding: UTF-8
#COPIRIGHT: Patrick Roncagliolo
#LICENCE: GNU GPL 3
import cgi, json
argsDict = cgi.FieldStorage()
EMPTY_DICT = {}
def getState (init = False):
dataDict = getDataDict ()
if dataDict is None \
and init is True:
(key, uri) = generateTOTP ()
generateQR (key, uri)
dataDict = newDataDict (key, uri)
setDataDict (dataDict)
devDict = getDevDict ()
if devDict is None \
and init is True:
devDict = newDevDict ()
setDevDict (devDict)
return (dataDict, devDict)
def generateTOTP ():
import string, random
from otpauth import OtpAuth as otpauth
key=''.join((random.choice(string.ascii_uppercase + string.digits)) for x in range(30))
auth = otpauth(key)
uri = auth.to_uri('totp', 'patrick@WakeOnLAN', 'WakeOnLAN')
return (key, uri)
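# Illustrative sketch: a typical provisioning flow pairs the two helpers:
#
#   key, uri = generateTOTP()
#   generateQR(key, uri)   # writes data/<key>.png for the authenticator app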
def generateQR (key, uri):
import os, qrcode
from glob import glob
img = qrcode.make(uri)
for oldImg in glob("data/*.png"):
os.remove(oldImg)
img.save("data/%s.png" % key)
def newDataDict (key, uri):
return {'otp-type': 'totp', 'key': key, 'uri': uri, 'post-token': '0'}
def getDataDict ():
try:
with open('data/data.json', 'r') as f:
dataDict = json.load(f)
except IOError:
dataDict = None
return dataDict
def setDataDict(dataDict):
with open('data/data.json', 'w') as dataFile:
json.dump(dataDict, dataFile)
def newDevDict():
return {}
def getDevDict():
try:
with open('data/devices.json', 'r') as devFile:
devDict = json.load(devFile)
except IOError:
devDict = None
return devDict
def setDevDict(devDict):
with open('data/devices.json', 'w') as devFile:
json.dump(devDict, devFile)
def addDevice(devDict, devname, devaddr):
devname = devname.lower().capitalize()
devaddr = devaddr.lower().replace('-',':')
if devname not in devDict:
devDict[devname]=devaddr
setDevDict(devDict)
return True
else:
return False
def rmvDevice(devDict, devname):
devname = devname.lower().capitalize()
if devname in devDict:
del devDict[devname]
setDevDict(devDict)
return True
else:
return False
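# The token scheme below is a simple monotonically increasing anti-replay
# counter: a POSTed token is accepted only when it is strictly greater than the
# stored 'post-token', and the stored value is advanced on success.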
def checkToken(dataDict):
if 'post-token' in dataDict.keys():
data_token = int(dataDict['post-token'])
token = data_token + 1
else:
raise KeyError
if 'action' in argsDict.keys() \
and 'token' in argsDict.keys():
post_token = int(argsDict['token'].value)
if post_token > data_token:
updateToken(dataDict, post_token)
token = post_token + 1
return (True, token)
else:
return (False, token)
else:
return (False, token)
def updateToken(dataDict, post_token):
dataDict['post-token'] = post_token
with open('data/data.json', 'w') as dataFile:
json.dump(dataDict, dataFile)
return int(dataDict['post-token'])
def printIndexHeader(stylesheets):
print 'Content-type: text/html\n\n',<|fim▁hole|> print '<title>RWOLS - Remote WakeOnLan Server</title>',
for stylesheet in stylesheets:
print '<link rel="stylesheet" type="text/css" href="%s">' % stylesheet,
print '<script src="https://cdn.jsdelivr.net/clipboard.js/1.5.13/clipboard.min.js"></script>',
print '<h1>Remote WakeOnLan Server</h1>'
def printBottomButton(label, link):
print '<form method="post"'
print 'action="%s">' % link,
print '<input type="submit"'
print 'value="%s">' % label,
print '</form>'<|fim▁end|> | print '<!DOCTYPE html>',
print '<meta name="viewport" content="width=device-width, initial-scale=1.0">', |
<|file_name|>te.js<|end_file_name|><|fim▁begin|><|fim▁hole|>OC.L10N.register(
"settings",
{
"Delete" : "తొలగించు",
"Server address" : "సేవకి చిరునామా",
"Cancel" : "రద్దుచేయి",
"Email" : "ఈమెయిలు",
"Your email address" : "మీ ఈమెయిలు చిరునామా",
"Password" : "సంకేతపదం",
"New password" : "కొత్త సంకేతపదం",
"Language" : "భాష",
"Name" : "పేరు",
"Username" : "వాడుకరి పేరు",
"Personal" : "వ్యక్తిగతం",
"Error" : "పొరపాటు"
},
"nplurals=2; plural=(n != 1);");<|fim▁end|> | |
<|file_name|>BufferGL.cpp<|end_file_name|><|fim▁begin|>//
// Copyright 2015 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// BufferGL.cpp: Implements the class methods for BufferGL.
#include "libANGLE/renderer/gl/BufferGL.h"
#include "common/debug.h"
#include "common/utilities.h"
#include "libANGLE/angletypes.h"
#include "libANGLE/formatutils.h"
#include "libANGLE/renderer/gl/FunctionsGL.h"
#include "libANGLE/renderer/gl/StateManagerGL.h"
#include "libANGLE/renderer/gl/renderergl_utils.h"
namespace rx
{
// Use the GL_COPY_READ_BUFFER binding when two buffers need to be bound simultaneously.
// GL_ELEMENT_ARRAY_BUFFER is supported on more versions but can modify the state of the currently
// bound VAO. Two simultaneous buffer bindings are only needed for glCopyBufferSubData which also
// adds the GL_COPY_READ_BUFFER binding.
static const GLenum SourceBufferOperationTarget = GL_COPY_READ_BUFFER;
// Use the GL_ELEMENT_ARRAY_BUFFER binding for most operations since it's available on all
// supported GL versions and doesn't affect any current state when it changes.
static const GLenum DestBufferOperationTarget = GL_ARRAY_BUFFER;
BufferGL::BufferGL(const FunctionsGL *functions, StateManagerGL *stateManager)
: BufferImpl(),
mIsMapped(false),
mMapOffset(0),
mMapSize(0),
mShadowBufferData(!CanMapBufferForRead(functions)),
mShadowCopy(),
mBufferSize(0),
mFunctions(functions),
mStateManager(stateManager),
mBufferID(0)
{
ASSERT(mFunctions);
ASSERT(mStateManager);
mFunctions->genBuffers(1, &mBufferID);
}
BufferGL::~BufferGL()
{
mStateManager->deleteBuffer(mBufferID);
mBufferID = 0;
}
gl::Error BufferGL::setData(const void* data, size_t size, GLenum usage)
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
mFunctions->bufferData(DestBufferOperationTarget, size, data, usage);
if (mShadowBufferData)
{
if (!mShadowCopy.resize(size))
{
return gl::Error(GL_OUT_OF_MEMORY, "Failed to resize buffer data shadow copy.");
}
if (size > 0 && data != nullptr)
{
memcpy(mShadowCopy.data(), data, size);
}
}
mBufferSize = size;
return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::setSubData(const void* data, size_t size, size_t offset)
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
mFunctions->bufferSubData(DestBufferOperationTarget, offset, size, data);
if (mShadowBufferData && size > 0)
{
memcpy(mShadowCopy.data() + offset, data, size);
}
return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::copySubData(BufferImpl* source, GLintptr sourceOffset, GLintptr destOffset, GLsizeiptr size)
{
BufferGL *sourceGL = GetAs<BufferGL>(source);
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
mStateManager->bindBuffer(SourceBufferOperationTarget, sourceGL->getBufferID());
mFunctions->copyBufferSubData(SourceBufferOperationTarget, DestBufferOperationTarget, sourceOffset, destOffset, size);
if (mShadowBufferData && size > 0)
{
ASSERT(sourceGL->mShadowBufferData);<|fim▁hole|> return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::map(GLenum access, GLvoid **mapPtr)
{
if (mShadowBufferData)
{
*mapPtr = mShadowCopy.data();
}
else
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
*mapPtr = mFunctions->mapBuffer(DestBufferOperationTarget, access);
}
mIsMapped = true;
mMapOffset = 0;
mMapSize = mBufferSize;
return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::mapRange(size_t offset, size_t length, GLbitfield access, GLvoid **mapPtr)
{
if (mShadowBufferData)
{
*mapPtr = mShadowCopy.data() + offset;
}
else
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
*mapPtr = mFunctions->mapBufferRange(DestBufferOperationTarget, offset, length, access);
}
mIsMapped = true;
mMapOffset = offset;
mMapSize = length;
return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::unmap(GLboolean *result)
{
ASSERT(result);
ASSERT(mIsMapped);
if (mShadowBufferData)
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
mFunctions->bufferSubData(DestBufferOperationTarget, mMapOffset, mMapSize,
mShadowCopy.data() + mMapOffset);
*result = GL_TRUE;
}
else
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
*result = mFunctions->unmapBuffer(DestBufferOperationTarget);
}
mIsMapped = false;
return gl::Error(GL_NO_ERROR);
}
gl::Error BufferGL::getIndexRange(GLenum type,
size_t offset,
size_t count,
bool primitiveRestartEnabled,
gl::IndexRange *outRange)
{
ASSERT(!mIsMapped);
if (mShadowBufferData)
{
*outRange = gl::ComputeIndexRange(type, mShadowCopy.data() + offset, count,
primitiveRestartEnabled);
}
else
{
mStateManager->bindBuffer(DestBufferOperationTarget, mBufferID);
const gl::Type &typeInfo = gl::GetTypeInfo(type);
const uint8_t *bufferData = MapBufferRangeWithFallback(
mFunctions, DestBufferOperationTarget, offset, count * typeInfo.bytes, GL_MAP_READ_BIT);
*outRange = gl::ComputeIndexRange(type, bufferData, count, primitiveRestartEnabled);
mFunctions->unmapBuffer(DestBufferOperationTarget);
}
return gl::Error(GL_NO_ERROR);
}
GLuint BufferGL::getBufferID() const
{
return mBufferID;
}
}<|fim▁end|> | memcpy(mShadowCopy.data() + destOffset, sourceGL->mShadowCopy.data() + sourceOffset, size);
}
|
<|file_name|>tests.py<|end_file_name|><|fim▁begin|># This Python file uses the following encoding: utf-8
from django.test import TestCase, RequestFactory
from models import Meeting, Abstract, Author
from django.core.urlresolvers import reverse
from fiber.models import Page
from views import AbstractCreateView
from home.models import Announcement
from datetime import datetime
from django.contrib.auth.models import AnonymousUser, User
# Factory method to create a fiber page tree with five pages.
# def create_django_page_tree():
# mainmenu = Page.objects.create(title='mainmenu')
# home = Page.objects.create(title='home', parent=mainmenu, url='home', template_name='base/home.html')
# Page.objects.create(title='join', parent=home, url='join', template_name='base/join.html')
# Page.objects.create(title='members', parent=home, url='members', template_name='base/members')
# Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Factory methods to create test abstracts, meetings, and authors
# def create_meeting(year=2020, title='Jamaica 2020', location='Jamaica', associated_with='AAPA'):
# """
# Creates a Meeting with default values for year, title, location and associated_with.
# """
# return Meeting.object.create(title, year, location=location, associated_with=associated_with)
# Factory method to create a fiber page tree with five home pages plus three meetings pages and their associated
# meeting instances.
# def create_three_meetings_with_pages():
# # Create home fiber tree
# create_django_page_tree()
# # Create meeting instances
# calgary = Meeting(year=2014, title='Calgary 2014', location='Calgary, AB', associated_with='AAPA')
# calgary.create_fiber_page()
# calgary.save()
# san_francisco = Meeting(year=2015, title='San Francisco 2015', location='San Francisco, CA', associated_with='SAA')
# san_francisco.create_fiber_page()
# san_francisco.save()
# atlanta = Meeting(year=2016, title='Atlanta 2016', location='Atlanta, GA', associated_with='AAPA')
# atlanta.create_fiber_page()
# atlanta.save()
def create_abstract(meeting,
contact_email='[email protected]',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
year=2020):
    return Abstract(meeting=meeting, contact_email=contact_email,
                    presentation_type=presentation_type, title=title,
                    abstract_text=abstract_text, year=year)
def create_author(abstract, author_rank,
last_name='Fake',
first_name="Ima",
name='Ima Fake',
department='Fake Anthropology',
institution='Chaos University',
country='United States of America',
email_address='[email protected]'
):
    return Author(abstract=abstract, author_rank=author_rank,
last_name=last_name,
first_name=first_name,
name=name,
department=department,
institution=institution,
country=country,
email_address=email_address
)
class MeetingCreateMethodTests(TestCase):
def test_meeting_create_method(self):
starting_meeting_count = Meeting.objects.count()
pittsburgh = Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+1)
self.assertEqual(pittsburgh.title, 'Pittsburgh 1992')
self.assertEqual(pittsburgh.year, 1992)
self.assertEqual(pittsburgh.associated_with, 'SAA')
class MeetingMethodTests(TestCase):
def setUp(self):
# Create a basic page tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
# Create two meetings
starting_meeting_count = Meeting.objects.count()
Meeting.objects.create(title='Pittsburgh 1992', year=1992,
location='Pittsburgh, PA', associated_with='SAA')
Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary', associated_with='AAPA')
self.assertEqual(Meeting.objects.count(), starting_meeting_count+2)
def test_meeting_create_fiber_page_method(self):
"""
Tests the fiber page constructor method.
"""
# Fetch a meeting
calgary_2014 = Meeting.objects.get(title='Calgary 2014')
# Call page constructor method
starting_page_count = Page.objects.count()
calgary_2014.create_fiber_page()
self.assertEqual(Page.objects.count(), starting_page_count+1)
# Fetch the fiber page we just created
calgary_2014_fiber_page = Page.objects.get(url__exact='2014')
# Test the attributes of the fiber page
self.assertEqual(calgary_2014_fiber_page.parent, Page.objects.get(url__exact='meetings'))
self.assertEqual(calgary_2014_fiber_page.url, '2014')
self.assertEqual(calgary_2014_fiber_page.title, 'Calgary 2014')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(), '/meetings/2014/')
self.assertEqual(calgary_2014_fiber_page.get_absolute_url(),
reverse('meetings:meeting_detail', kwargs={"year": 2014}))
# Test that the page renders
response = self.client.get('/meetings/2014/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Calgary')
def test_meeting_has_detail_method(self):
"""
Tests the has_detail method
"""
calgary_2014 = Meeting.objects.get(year=2014)
# IF no fiber page then has_detail should be false
self.assertEqual(calgary_2014.has_detail(), False)
# Call page constructor method
calgary_2014.create_fiber_page()
# If fiber page then has_detail should be true
self.assertEqual(calgary_2014.has_detail(), True)
cfp = Page.objects.get(url__exact=2014) # get tha page instance
cfp.is_public = False # set to not public
cfp.save() # save the change
self.assertEqual(calgary_2014.has_detail(), False) # Now has detail should return false
class MeetingsViewTestsNoData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
def test_meetings_index_view_with_no_meetings(self):
response = self.client.get(reverse('meetings:meetings'))
self.assertEqual(response.status_code, 200)
self.assertQuerysetEqual(response.context['meeting_list'], [])
class MeetingsViewTestsWithData(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
self.assertEqual(Page.objects.count(), starting_page_count+2) # test two pages saved
calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary, AB', associated_with='AAPA')
calgary.create_fiber_page()
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
san_francisco.create_fiber_page()
atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
location='Atlanta, GA', associated_with='AAPA')
atlanta.create_fiber_page()
def test_meetings_index_view_with_meetings(self):
response = self.client.get(reverse('meetings:meetings')) # Meetings index should show three meetings
calgary = Meeting.objects.get(year=2014) # get meeting instance
san_francisco = Meeting.objects.get(year=2015)
atlanta = Meeting.objects.get(year=2016)
self.assertContains(response, calgary.location, status_code=200,)
self.assertContains(response, san_francisco.location, status_code=200)
self.assertContains(response, atlanta.location, status_code=200)
self.assertQuerysetEqual(response.context['meeting_list'],
['<Meeting: Atlanta 2016>',
'<Meeting: San Francisco 2015>',
'<Meeting: Calgary 2014>'])
self.assertContains(response, "<table>") # response includes a table element
self.assertContains(response, '<a href="/meetings/2014/"') # contains a link to the 2014 meeting detail
self.assertContains(response, '<a href="/meetings/2015/"')
self.assertContains(response, '<a href="/meetings/2016/"')
self.assertEqual(Page.objects.count(), 5) # should have 5 fiber pages
self.assertEqual(Meeting.objects.count(), 3) # should hav 3 meetings
atlanta_fp = Page.objects.get(url__exact=2016) # Get Atlanta fiber page
atlanta_fp.is_public = False # Set to not public
atlanta_fp.save() # save the change
self.assertEqual(atlanta_fp.is_public, False)
self.assertEqual(atlanta.has_detail(), False) # meeting should NOT have detail
self.assertEqual(atlanta_fp.show_in_menu, False) # meeting fiber page should not be in menu
response = self.client.get(reverse('meetings:meetings')) # Reload the page!
# If fiber page is not public and not in menu there should be no link to it
self.assertNotContains(response, '<a href="/meetings/2016/"')
def test_meetings_index_view_with_missing_meetings(self):
response = self.client.get(reverse('meetings:meetings'))
# Returns page but does not contain a meeting that does not exist.
self.assertNotContains(response, "Vancouver", status_code=200)
self.assertContains(response, "<table>", status_code=200) # contains a table listing meetings
def test_meetings_detail_view(self):
response = self.client.get(reverse('meetings:meeting_detail', args=[2014]))
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Calgary')
class AbstractCreateMethodTests(TestCase):
def test_abstract_create_method(self):
starting_abstract_count = Abstract.objects.count()
# create a meeting
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
# create an abstract for the meeting
new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='[email protected]',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p>Silly walks in Neanderthals.</p> """,
year=2015)
# test that the abstract was created correctly
self.assertEqual(Abstract.objects.count(), starting_abstract_count+1)
self.assertEqual(new_abstract.title, 'Silly Walks of the Neanderthals')
self.assertEqual(new_abstract.year, 2015)
starting_author_count = Author.objects.count()
new_author = Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="[email protected]")
self.assertEqual(Author.objects.count(), starting_author_count+1)
self.assertEqual(new_author.last_name, 'Reed')
self.assertEqual(new_author.abstract, new_abstract)
self.assertEqual(new_author.full_name(), "Bob Reed")
self.assertEqual(new_author.author_rank, 1)
author2 = Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
last_name='Jéhnson', institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="[email protected]")
self.assertEqual(Author.objects.count(), starting_author_count+2)
self.assertEqual(author2.last_name, 'Jéhnson')
self.assertEqual(author2.abstract, new_abstract)
self.assertEqual(author2.full_name(), 'Denné Jéhnson')
self.assertEqual(author2.author_rank, 2)
class AbstractMethodTests(TestCase):
def setUp(self):
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
# create an abstract for the meeting
new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='[email protected]',
presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p>Silly walks in Neanderthals.</p> """,
year=2015)
Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Bob",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="[email protected]")
Author.objects.create(abstract=new_abstract, author_rank=2, first_name='Denné',
last_name='Jéhnson', institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="[email protected]")
abstract2 = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='[email protected]',
presentation_type='Poster',
title='∂13 C isotopic values in zombies indicate a C4 diet',
abstract_text="""<p>Yummy plants, ugggh</p> """,
year=2015)
Author.objects.create(abstract=abstract2, author_rank=1, first_name="Archer",
last_name="Flexnick", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="[email protected]")
Author.objects.create(abstract=abstract2, author_rank=2, first_name="Felix",
last_name="Quustz", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="[email protected]")
Author.objects.create(abstract=abstract2, author_rank=3, first_name="Adam",
last_name="Ackworth", institution="University of Transylvania",
department="Anthropology", country="Romania",
email_address="[email protected]")
def test_lead_author_last_name_method(self):
abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
self.assertEqual(abstract.lead_author_last_name(), "Reed") # Last name of lead author should be "Reed"
def test_pretty_title(self):
abstract = Abstract.objects.get(title='Silly Walks of the Neanderthals')
self.assertEqual(abstract.pretty_title(), 'Silly Walks of the Neanderthals')
abstract = Abstract.objects.get(title='∂13 C isotopic values in zombies indicate a C4 diet')
self.assertEqual(abstract.pretty_title(), u'\u220213 C isotopic values in zombies indicate a C4 diet')
class AbstractViewTests(TestCase):
def setUp(self):
# Create basic fiber tree
starting_page_count = Page.objects.count()
mainmenu = Page.objects.create(title='mainmenu')
meetings_page = Page.objects.create(title='meetings', parent=mainmenu, url='meetings', template_name='')
# Create abstract fiber page
abstract_submission_page = Page.objects.create(title='abstract submission',
parent=meetings_page, url='abstract')
Page.objects.create(title='Create Abstract', parent=abstract_submission_page, url='add')
self.assertEqual(Page.objects.count(), starting_page_count+4) # test 4 pages saved
# Create 3 meetings with associated fiber pages
calgary = Meeting.objects.create(year=2014, title='Calgary 2014',
location='Calgary, AB', associated_with='AAPA')
calgary.create_fiber_page()
san_francisco = Meeting.objects.create(year=2015, title='San Francisco 2015',
location='San Francisco, CA', associated_with='SAA')
san_francisco.create_fiber_page()
atlanta = Meeting.objects.create(year=2016, title='Atlanta 2016',
location='Atlanta, GA', associated_with='AAPA')
atlanta.create_fiber_page()
self.assertEqual(Page.objects.count(), starting_page_count+7) # test 6 pages saved
# Create an abstract with two authors
self.assertEqual(Meeting.objects.count(), 3)
self.assertEqual(Abstract.objects.count(), 0)
san_francisco = Meeting.objects.get(year=2015)
self.assertEqual(san_francisco.location, 'San Francisco, CA')
        new_abstract = Abstract.objects.create(meeting_id=san_francisco.id, contact_email='[email protected]', presentation_type='Paper',
title='Silly Walks of the Neanderthals',
abstract_text="""<p> Test abstract text about silly walks in Neanderthals.</p> """,
year=2015) # create a new abstract for the san francisco meeting
Author.objects.create(abstract=new_abstract, author_rank=1, first_name="Denne",
last_name="Reed", institution="University of Texas at Austin",
department="Anthropology", country="United States of America",
email_address="[email protected]")
Author.objects.create(abstract=new_abstract, author_rank=2, first_name="Bob",
last_name="Frankle", institution="University of Michigan",
department="Anthropology", country="United States of America",
email_address="[email protected]")
def test_create_abstract_view_with_get_method(self):
"""A get request should load a blank version of the form"""
response = self.client.get(reverse('meetings:create_abstract'))
self.assertEqual(response.status_code, 200) # Response should be an HTML page with status code 200
self.assertTemplateUsed(response, 'meetings/abstract.html') # Response should render the abstract.html template
self.assertContains(response, "<form") # Test that the page loads a form
self.assertContains(response, "<p>Author 1<br>") # Test that the page contains an author formset
self.assertContains(response, "input", count=36) # Test that the page contains 36 input elements
class AbstractViewTestsWithData(TestCase):
fixtures = ['fixtures/fiber_data.json', 'fixtures/meetings_data.json']
def setUp(self):
self.factory = RequestFactory()
self.user = User.objects.create(username='bob', email='[email protected]', password='secret')
def test_get(self):
request = self.factory.get(reverse('meetings:create_abstract'))
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200)
def test_abstract_create_view_with_empty_post_data(self):
request = self.factory.post(reverse('meetings:create_abstract'), {})
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200)
def test_abstract_lead_last_name_sorting_method(self):
queryset = Abstract.objects.filter(pk__in=[31, 33, 34, 35, 36]) # grab 5 posters from 2014
name_list = []
for q in queryset: name_list.append(q.lead_author_last_name())
self.assertEqual(len(name_list), 5)
self.assertEqual(name_list, ["Schillinger", "Harris", "Harris", "Key", "Werner"])
ordered_queryset = queryset.order_by('author__author_rank',
'author__last_name', 'author__first_name')[0:queryset.count()]
self.assertEqual(len(ordered_queryset), len(queryset))
ordered_name_list = []
for q in ordered_queryset: ordered_name_list.append(q.lead_author_last_name())
self.assertEqual(ordered_name_list, ["Harris", "Harris", "Key", "Schillinger", "Werner"])
def test_abstract_create_view_with_completed_form(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': '[email protected]',
'confirm_email': '[email protected]',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': '[email protected]',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
starting_abstract_count = Abstract.objects.filter(year=2015).count()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that successful submit returns redirect
def test_abstract_with_missing_title(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
#'title': """<p>A test title with strange characters ∂13C and species names
#like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': '[email protected]',
'confirm_email': '[email protected]',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': '[email protected]',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
self.assertEqual(response.context_data['form'].errors['title'][0], u'This field is required.')
def test_abstract_with_missing_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': '[email protected]',
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',<|fim▁hole|> 'author_set-0-email_address': '[email protected]',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'This field is required.')
def test_abstract_with_malformed_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': '[email protected]',
            'confirm_email': 'denne.reed',  # malformed email address
            'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': '[email protected]',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again
# test that the form contains an appropriate error message
self.assertEqual(response.context_data['form'].errors['confirm_email'][0], u'Enter a valid email address.')
def test_abstract_when_contact_email_not_same_as_confirmation_email(self):
form_data = {
'meeting': 24,
'year': 2015,
'presentation_type': 'Paper',
'title': """<p>A test title with strange characters ∂13C and species names
like <em>Australopithecus afarensis</em></p>""",
'abstract_text': """<p>You think water moves fast? You should see ice. It moves like it has a mind. Like it
knows it killed the world once and got a taste for murder. After the avalanche, it took us a week to climb
out. Now, I don't know exactly when we turned on each other, but I know that seven of us survived the
slide... and only five made it out. Now we took an oath, that I'm breaking now. We said we'd say it was
the snow that killed the other two, but it wasn't. Nature is lethal but it doesn't hold a candle to man.
</p>""",
'acknowledgements': 'I gratefully acknowledge the academy.',
'contact_email': '[email protected]', # valid email address
'confirm_email': '[email protected]', # valid email address, but not same as above
'author_set-0-name': 'Denne Reed',
'author_set-0-department': 'Anthropology',
'author_set-0-institution': 'University of Texas at Austin',
'author_set-0-country': 'United States of America',
'author_set-0-email_address': '[email protected]',
}
request = self.factory.post(reverse('meetings:create_abstract'), form_data)
request.user = AnonymousUser()
response = AbstractCreateView.as_view()(request)
self.assertEqual(response.status_code, 200) # test that on submit we return the form again<|fim▁end|> | 'author_set-0-country': 'United States of America', |
<|file_name|>description.py<|end_file_name|><|fim▁begin|># ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
Template file used by the OPF Experiment Generator to generate the actual
description.py file by replacing $XXXXXXXX tokens with desired values.
This description.py file was generated by:
'~/nta/eng/lib/python2.6/site-packages/nupic/frameworks/opf/expGenerator/ExpGenerator.py'
"""
from nupic.frameworks.opf.expdescriptionapi import ExperimentDescriptionAPI
from nupic.frameworks.opf.expdescriptionhelpers import (
updateConfigFromSubConfig,
applyValueGettersToContainer,
DeferredDictLookup)
from nupic.frameworks.opf.clamodelcallbacks import *
from nupic.frameworks.opf.metrics import MetricSpec
from nupic.frameworks.opf.opfutils import (InferenceType,
InferenceElement)
from nupic.support import aggregationDivide
from nupic.frameworks.opf.opftaskdriver import (
IterationPhaseSpecLearnOnly,
IterationPhaseSpecInferOnly,
IterationPhaseSpecLearnAndInfer)
# Model Configuration Dictionary:
#
# Define the model parameters and adjust for any modifications if imported
# from a sub-experiment.
#
# These fields might be modified by a sub-experiment; this dict is passed
# between the sub-experiment and base experiment<|fim▁hole|>#
#
# NOTE: Use of DEFERRED VALUE-GETTERs: dictionary fields and list elements
# within the config dictionary may be assigned futures derived from the
# ValueGetterBase class, such as DeferredDictLookup.
# This facility is particularly handy for enabling substitution of values in
# the config dictionary from other values in the config dictionary, which is
# needed by permutation.py-based experiments. These values will be resolved
# during the call to applyValueGettersToContainer(),
# which we call after the base experiment's config dictionary is updated from
# the sub-experiment. See ValueGetterBase and
# DeferredDictLookup for more details about value-getters.
#
# For each custom encoder parameter to be exposed to the sub-experiment/
# permutation overrides, define a variable in this section, using key names
# beginning with a single underscore character to avoid collisions with
# pre-defined keys (e.g., _dsEncoderFieldName2_N).
#
# Example:
# config = dict(
# _dsEncoderFieldName2_N = 70,
# _dsEncoderFieldName2_W = 5,
# dsEncoderSchema = [
# base=dict(
# fieldname='Name2', type='ScalarEncoder',
# name='Name2', minval=0, maxval=270, clipInput=True,
# n=DeferredDictLookup('_dsEncoderFieldName2_N'),
# w=DeferredDictLookup('_dsEncoderFieldName2_W')),
# ],
# )
# updateConfigFromSubConfig(config)
# applyValueGettersToContainer(config)
config = {
# Type of model that the rest of these parameters apply to.
'model': "CLA",
# Version that specifies the format of the config.
'version': 1,
# Intermediate variables used to compute fields in modelParams and also
# referenced from the control section.
'aggregationInfo': {
'fields': [ (u'timestamp', 'first'),
(u'gym', 'first'),
(u'consumption', 'sum')],
'days': 0,
'hours': 1,
'microseconds': 0,
'milliseconds': 0,
'minutes': 0,
'months': 0,
'seconds': 0,
'weeks': 0,
'years': 0},
'predictAheadTime': None,
# Model parameter dictionary.
'modelParams': {
# The type of inference that this model will perform
'inferenceType': 'TemporalMultiStep',
'sensorParams': {
# Sensor diagnostic output verbosity control;
# if > 0: sensor region will print out on screen what it's sensing
# at each step 0: silent; >=1: some info; >=2: more info;
# >=3: even more info (see compute() in py/regions/RecordSensor.py)
'verbosity' : 1,
# Example:
# dsEncoderSchema = [
# DeferredDictLookup('__field_name_encoder'),
# ],
#
# (value generated from DS_ENCODER_SCHEMA)
'encoders': {
u'timestamp_timeOfDay': { 'fieldname': u'timestamp',
'name': u'timestamp_timeOfDay',
'timeOfDay': (21, 1),
'type': 'DateEncoder'},
u'timestamp_dayOfWeek': { 'dayOfWeek': (21, 1),
'fieldname': u'timestamp',
'name': u'timestamp_dayOfWeek',
'type': 'DateEncoder'},
u'timestamp_weekend': { 'fieldname': u'timestamp',
'name': u'timestamp_weekend',
'type': 'DateEncoder',
'weekend': 21},
u'consumption': { 'clipInput': True,
'fieldname': u'consumption',
'n': 100,
'name': u'consumption',
'type': 'AdaptiveScalarEncoder',
'w': 21},
},
# A dictionary specifying the period for automatically-generated
# resets from a RecordSensor;
#
# None = disable automatically-generated resets (also disabled if
# all of the specified values evaluate to 0).
    # Valid keys are any desired combination of the following:
# days, hours, minutes, seconds, milliseconds, microseconds, weeks
#
# Example for 1.5 days: sensorAutoReset = dict(days=1,hours=12),
#
# (value generated from SENSOR_AUTO_RESET)
'sensorAutoReset' : { u'days': 0, u'hours': 0},
},
'spEnable': True,
'spParams': {
# SP diagnostic output verbosity control;
# 0: silent; >=1: some info; >=2: more info;
'spVerbosity' : 0,
'globalInhibition': 1,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
'inputWidth': 0,
# SP inhibition control (absolute value);
# Maximum number of active columns in the SP region's output (when
# there are more, the weaker ones are suppressed)
'numActivePerInhArea': 40,
'seed': 1956,
# coincInputPoolPct
            # What percent of the column's receptive field is available
# for potential synapses. At initialization time, we will
# choose coincInputPoolPct * (2*coincInputRadius+1)^2
'coincInputPoolPct': 0.5,
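            # Worked example (illustrative, with a hypothetical
            # coincInputRadius of 16): each column would draw from
            # 0.5 * (2*16+1)^2 = 0.5 * 1089, i.e. roughly 544 potential
            # synapses.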
# The default connected threshold. Any synapse whose
# permanence value is above the connected threshold is
# a "connected synapse", meaning it can contribute to the
# cell's firing. Typical value is 0.10. Cells whose activity
# level before inhibition falls below minDutyCycleBeforeInh
# will have their own internal synPermConnectedCell
# threshold set below this default value.
# (This concept applies to both SP and TP and so 'cells'
# is correct here as opposed to 'columns')
'synPermConnected': 0.1,
'synPermActiveInc': 0.1,
'synPermInactiveDec': 0.01,
},
# Controls whether TP is enabled or disabled;
# TP is necessary for making temporal predictions, such as predicting
# the next inputs. Without TP, the model is only capable of
# reconstructing missing sensor inputs (via SP).
'tpEnable' : True,
'tpParams': {
# TP diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
# (see verbosity in nta/trunk/py/nupic/research/TP.py and TP10X*.py)
'verbosity': 0,
# Number of cell columns in the cortical region (same number for
# SP and TP)
# (see also tpNCellsPerCol)
'columnCount': 2048,
# The number of cells (i.e., states), allocated per column.
'cellsPerColumn': 32,
'inputWidth': 2048,
'seed': 1960,
# Temporal Pooler implementation selector (see _getTPClass in
# CLARegion.py).
'temporalImp': 'cpp',
# New Synapse formation count
# NOTE: If None, use spNumActivePerInhArea
#
# TODO: need better explanation
'newSynapseCount': 20,
# Maximum number of synapses per segment
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSynapsesPerSegment': 32,
# Maximum number of segments per cell
# > 0 for fixed-size CLA
# -1 for non-fixed-size CLA
#
# TODO: for Ron: once the appropriate value is placed in TP
# constructor, see if we should eliminate this parameter from
# description.py.
'maxSegmentsPerCell': 128,
# Initial Permanence
# TODO: need better explanation
'initialPerm': 0.21,
# Permanence Increment
'permanenceInc': 0.1,
# Permanence Decrement
# If set to None, will automatically default to tpPermanenceInc
# value.
'permanenceDec' : 0.1,
'globalDecay': 0.0,
'maxAge': 0,
# Minimum number of active synapses for a segment to be considered
# during search for the best-matching segments.
# None=use default
# Replaces: tpMinThreshold
'minThreshold': 12,
# Segment activation threshold.
# A segment is active if it has >= tpSegmentActivationThreshold
# connected synapses that are active due to infActiveState
# None=use default
# Replaces: tpActivationThreshold
'activationThreshold': 16,
'outputType': 'normal',
# "Pay Attention Mode" length. This tells the TP how many new
# elements to append to the end of a learned sequence at a time.
# Smaller values are better for datasets with short sequences,
# higher values are better for datasets with long sequences.
'pamLength': 1,
},
'clParams': {
# Classifier implementation selection.
'implementation': 'cpp',
'regionName' : 'CLAClassifierRegion',
# Classifier diagnostic output verbosity control;
# 0: silent; [1..6]: increasing levels of verbosity
'clVerbosity' : 0,
# This controls how fast the classifier learns/forgets. Higher values
# make it adapt faster and forget older patterns faster.
'alpha': 0.001,
# This is set after the call to updateConfigFromSubConfig and is
# computed from the aggregationInfo and predictAheadTime.
'steps': '1,5',
},
'trainSPNetOnlyIfRequested': False,
},
}
# end of config dictionary
# Adjust base config dictionary for any modifications if imported from a
# sub-experiment
updateConfigFromSubConfig(config)
# Compute predictionSteps based on the predictAheadTime and the aggregation
# period, which may be permuted over.
if config['predictAheadTime'] is not None:
predictionSteps = int(round(aggregationDivide(
config['predictAheadTime'], config['aggregationInfo'])))
assert (predictionSteps >= 1)
config['modelParams']['clParams']['steps'] = str(predictionSteps)
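# Worked example (illustrative): with the hourly aggregation above and
# predictAheadTime = dict(hours=5), aggregationDivide returns 5.0, so the
# classifier 'steps' value would become '5'.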
# Adjust config by applying ValueGetterBase-derived
# futures. NOTE: this MUST be called after updateConfigFromSubConfig() in order
# to support value-getter-based substitutions from the sub-experiment (if any)
applyValueGettersToContainer(config)
control = {
# The environment that the current model is being run in
"environment": 'grok',
# Input stream specification per py/nupic/cluster/database/StreamDef.json.
#
'dataset' : { 'aggregation': config['aggregationInfo'],
u'info': u'test_hotgym',
u'streams': [ { u'columns': [u'*'],
u'info': u'hotGym.csv',
u'last_record': 100,
u'source': u'file://extra/hotgym/hotgym.csv'}],
u'version': 1},
# Iteration count: maximum number of iterations. Each iteration corresponds
# to one record from the (possibly aggregated) dataset. The task is
# terminated when either number of iterations reaches iterationCount or
# all records in the (possibly aggregated) database have been processed,
# whichever occurs first.
#
# iterationCount of -1 = iterate over the entire dataset
'iterationCount' : -1,
# A dictionary containing all the supplementary parameters for inference
"inferenceArgs":{u'predictedField': u'consumption', u'predictionSteps': [1, 5]},
# Metrics: A list of MetricSpecs that instantiate the metrics that are
# computed for this experiment
'metrics':[
MetricSpec(field=u'consumption', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1000, 'steps': [1, 5], 'errorMetric': 'aae'}),
MetricSpec(field=u'consumption', metric='multiStep', inferenceElement='multiStepBestPredictions', params={'window': 1000, 'steps': [1, 5], 'errorMetric': 'altMAPE'}),
],
# Logged Metrics: A sequence of regular expressions that specify which of
# the metrics from the Inference Specifications section MUST be logged for
# every prediction. The regex's correspond to the automatically generated
# metric labels. This is similar to the way the optimization metric is
# specified in permutations.py.
'loggedMetrics': ['.*'],
}
################################################################################
################################################################################
descriptionInterface = ExperimentDescriptionAPI(modelConfig=config,
control=control)<|fim▁end|> | |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Keep track of all the git repositories on your machine.
//!
//! This crate houses the binary and library for the git-global subcommand, a
//! way to find, query statuses, and gain other insights about all the git repos
//! on your machine. The binary can be installed with cargo: `cargo install
//! git-global`.
//!
//! # Command-line Usage
//!
//! ```bash
//! $ git global [status] # show `git status -s` for all your git repos
//! $ git global info # show information about git-global itself
//! $ git global list # show all git repos git-global knows about
//! $ git global scan # search your filesystem for git repos and update cache
//! ```
//!
//! # Public Interface
//!
//! The git-global project's primary goal is to produce a useful binary. There's
//! no driving force to provide a very good library for other Rust projects to
//! use, so this documentation primarily serves to illustrate how the codebase
//! is structured. (If a library use-case arises, however, that would be fine.)
//!
//! The [`Repo`] struct is a git repository that is identified by the full path
//! to its base directory (instead of, say, its `.git` directory).
//!
//! The [`Config`] struct holds a user's git-global configuration information,
//! which usually merges some default values with values in the `[global]`
//! section of the user's global `.gitconfig` file. It provides access to the
//! list of known `Repo`s via the `get_repos()` method, which reads from a cache
//! file, populating it for the first time after performing a filesystem scan,
//! if necessary.
//!
//! A [`Report`] contains messages added by a subcommand about the overall
//! results of what it did, as well as messages about the specific `Repo`s to
//! which that subcommand applies. All subcommand modules expose an `execute()`
//! function that takes ownership of a `Config` struct and returns a
//! `Result<Report>`. These subcommands live in the [`subcommands`][subcommands]
//! module.
//!
//! The [`run_from_command_line()`][rfcl] function handles running git-global
//! from the command line and serves as the entry point for the binary.
//!
//! [`Config`]: struct.Config.html
//! [`Repo`]: struct.Repo.html
//! [`Report`]: struct.Report.html
//! [rfcl]: fn.run_from_command_line.html
//! [subcommands]: subcommands/index.html
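//!
//! # Example (library sketch)
//!
//! A minimal, illustrative sketch of using the crate as a library; the
//! `Config::new()` constructor name below is an assumption, not guaranteed
//! API, so consult the actual `Config` docs:
//!
//! ```text
//! let config = git_global::Config::new();
//! let report = git_global::subcommands::list::execute(config)?;
//! // ...inspect the returned Report's messages...
//! ```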
mod cli;
mod config;
mod errors;
mod repo;
mod report;
pub mod subcommands; // Using `pub mod` so we see the docs.
pub use cli::run_from_command_line;
pub use config::Config;<|fim▁hole|><|fim▁end|> | pub use errors::{GitGlobalError, Result};
pub use repo::Repo;
pub use report::Report; |
<|file_name|>operations.go<|end_file_name|><|fim▁begin|>package costmanagement
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"net/http"
)
// OperationsClient is the client for the Operations methods of the Costmanagement service.
type OperationsClient struct {
BaseClient
}
// NewOperationsClient creates an instance of the OperationsClient client.
func NewOperationsClient(subscriptionID string) OperationsClient {
return NewOperationsClientWithBaseURI(DefaultBaseURI, subscriptionID)
}
// NewOperationsClientWithBaseURI creates an instance of the OperationsClient client.
func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string) OperationsClient {
return OperationsClient{NewWithBaseURI(baseURI, subscriptionID)}
}
// List lists all of the available cost management REST API operations.
func (client OperationsClient) List(ctx context.Context) (result OperationListResultPage, err error) {
result.fn = client.listNextResults
req, err := client.ListPreparer(ctx)
if err != nil {
err = autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "List", nil, "Failure preparing request")
return
}
resp, err := client.ListSender(req)
if err != nil {
result.olr.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "List", resp, "Failure sending request")
return
}
result.olr, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "List", resp, "Failure responding to request")
}
return
}
// ListPreparer prepares the List request.
func (client OperationsClient) ListPreparer(ctx context.Context) (*http.Request, error) {
const APIVersion = "2018-05-31"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPath("/providers/Microsoft.CostManagement/operations"),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// ListSender sends the List request. The method will close the
// http.Response Body if it receives an error.
func (client OperationsClient) ListSender(req *http.Request) (*http.Response, error) {
return autorest.SendWithSender(client, req,
autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// ListResponder handles the response to the List request. The method always
// closes the http.Response Body.<|fim▁hole|> resp,
client.ByInspecting(),
azure.WithErrorUnlessStatusCode(http.StatusOK),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}
// listNextResults retrieves the next set of results, if any.
func (client OperationsClient) listNextResults(lastResults OperationListResult) (result OperationListResult, err error) {
req, err := lastResults.operationListResultPreparer()
if err != nil {
return result, autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "listNextResults", nil, "Failure preparing next results request")
}
if req == nil {
return
}
resp, err := client.ListSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
return result, autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "listNextResults", resp, "Failure sending next results request")
}
result, err = client.ListResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "costmanagement.OperationsClient", "listNextResults", resp, "Failure responding to next results request")
}
return
}
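// Example (illustrative sketch; assumes client.Authorizer is configured and
// that the autorest-generated iterator API (NotDone/Value/Next) is present):
//
//	client := costmanagement.NewOperationsClient("<subscription-id>")
//	for iter, err := client.ListComplete(context.Background()); err == nil && iter.NotDone(); err = iter.Next() {
//		op := iter.Value() // one available operation per step
//		_ = op
//	}
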
// ListComplete enumerates all values, automatically crossing page boundaries as required.
func (client OperationsClient) ListComplete(ctx context.Context) (result OperationListResultIterator, err error) {
result.page, err = client.List(ctx)
return
}<|fim▁end|> | func (client OperationsClient) ListResponder(resp *http.Response) (result OperationListResult, err error) {
err = autorest.Respond( |
<|file_name|>chromium_code_search.py<|end_file_name|><|fim▁begin|># Copyright 2017 Josh Karlin. All rights reserved.
# Use of this source code is governed by the Apache license found in the LICENSE
# file.
import argparse
import datetime
import getopt
import json
import sys
import tempfile
import threading
import time
import urllib.request
import urllib.error
import urllib.parse
gFileCache = None;
# A key/value store that writes values to disk as temporary files and
# expires entries after 30 minutes.
class FileCache:
def __init__(self):
self.store = {}
threading.Timer(15 * 60, self.gc).start();
def put(self, url, data):
f = tempfile.TemporaryFile();
f.write(data);
self.store[url] = (f, datetime.datetime.now());
def get(self, url):
if not url in self.store:
return ''
(f, timestamp) = self.store[url]<|fim▁hole|> return f.read();
def gc(self):
threading.Timer(15 * 60, self.gc).start();
expired = datetime.datetime.now() - datetime.timedelta(minutes=30);
remove = []
for url, (f, timestamp) in self.store.items():
if timestamp < expired:
remove.append(url)
for url in remove:
self.store.pop(url);
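# Illustrative FileCache usage (a sketch; constructing it also starts the
# background gc timer):
#
#   cache = FileCache()
#   cache.put('https://example.com/q', b'{"result": 1}')
#   assert cache.get('https://example.com/q') == b'{"result": 1}'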
def cacheResponses(should_cache):
global gFileCache
if not should_cache:
gFileCache = None;
return
if gFileCache:
return
gFileCache = FileCache();
# Retrieve the URL, trying the cache first and falling back to the network.
def retrieve(url):
global gFileCache
if gFileCache:
cached_response = gFileCache.get(url);
if (cached_response):
return cached_response.decode('utf8');
response = None
try:
if len(url) > 1500:
short_url = url.split('?')[0]
data = url.split('?')[1]
response = urllib.request.urlopen(short_url, data=data.encode('utf-8'), timeout=3)
else:
response = urllib.request.urlopen(url, timeout=3)
  except urllib.error.URLError:
return ''
result = response.read()
if gFileCache:
gFileCache.put(url, result);
return result.decode('utf8');
def getSignatureFor(src_file, method):
url = ('https://cs.chromium.org/codesearch/json'
'?annotation_request=b'
'&file_spec=b'
'&package_name=chromium'
'&name={file_name}'
'&file_spec=e'
'&type=b'
'&id=1'
'&type=e'
'&label='
'&follow_branches=false'
'&annotation_request=e')
url = url.format(file_name=urllib.parse.quote(src_file, safe=''))
result = retrieve(url);
if not result:
return ''
result = json.loads(result)['annotation_response'][0]
for snippet in result.get('annotation', []):
if not 'type' in snippet:
continue
if 'xref_signature' in snippet:
signature = snippet['xref_signature']['signature']
if '%s(' % method in signature:
return signature
elif 'internal_link' in snippet:
signature = snippet['internal_link']['signature']
if '::%s' % method in signature or 'class-%s' % method in signature:
return signature
return ''
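# Illustrative usage (the path and method below are hypothetical):
#
#   signature = getSignatureFor('src/net/url_request/url_request.cc', 'Start')
#   if signature:
#       xrefs = getXrefsFor(signature)  # defined below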
def getCallGraphFor(signature):
url = ('https://cs.chromium.org/codesearch/json'
'?call_graph_request=b'
'&signature={signature}'
'&file_spec=b'
'&package_name=chromium'
'&name=.'
'&file_spec=e'
'&max_num_results=500'
'&call_graph_request=e')
url = url.format(signature=urllib.parse.quote(signature, safe=''))
result = retrieve(url);
if not result:
return {}
result = json.loads(result)['call_graph_response'][0];
node = result['node'];
callers = [];
last_signature = ''
if not 'children' in node:
return callers
for child in node['children']:
if child['signature'] == last_signature:
continue
if not 'snippet_file_path' in child:
continue
caller = {}
caller['filename'] = child['snippet_file_path'];
caller['line'] = child['call_site_range']['start_line']
caller['col'] = child['call_site_range']['start_column']
caller['text'] = child['snippet']['text']['text']
caller['calling_method'] = child['identifier']
caller['calling_signature'] = child['signature']
last_signature = child['signature']
caller['display_name'] = child['display_name']
callers.append(caller)
return callers
def getRefForMatch(filename, match):
ref = {'filename': filename, 'line': match['line_number'], 'signature': match['signature']}
if 'line_text' in match:
ref['line_text'] = match['line_text']
return ref;
def getXrefsFor(signature):
url = ('https://cs.chromium.org/codesearch/json'
'?xref_search_request=b'
'&query={signature}'
'&file_spec=b'
'&name=.'
'&package_name=chromium'
'&file_spec=e'
'&max_num_results=500'
'&xref_search_request=e')
url = url.format(signature=urllib.parse.quote(signature, safe=''))
result = retrieve(url);
if not result:
return {}
result = json.loads(result)['xref_search_response'][0]
status = result['status']
if not 'search_result' in result:
return {}
search_results = result['search_result']
xrefs = {}
for file_result in search_results:
filename = file_result['file']['name']
for match in file_result['match']:
if match['type'] == 'HAS_DEFINITION':
xrefs['definition'] = getRefForMatch(filename, match);
elif match['type'] == 'HAS_DECLARATION':
xrefs['declaration'] = getRefForMatch(filename, match);
elif match['type'] == 'OVERRIDDEN_BY':
xrefs.setdefault('overrides', []);
xrefs['overrides'].append(getRefForMatch(filename, match));
elif match['type'] == 'REFERENCED_AT':
xrefs.setdefault('references', []);
xrefs['references'].append(getRefForMatch(filename, match));
return xrefs
def logAndExit(msg):
print(msg);
sys.exit(2);
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Searches Chromium Code Search for X-Refs.')
parser.add_argument('-p', '--path',
help='The path to this file starting with src/')
parser.add_argument('-w', '--word',
help='The word to search for in the file denoted by the path argument. You must also specify -p')
parser.add_argument('-s', '--signature',
help='A signature provided from a previous search. No -p or -w arguments required.')
args = parser.parse_args()
signature = args.signature;
results = {}
if not signature:
if bool(args.path) ^ bool(args.word):
print("Both path and word must be supplied if one is supplied");
sys.exit(2);
signature = getSignatureFor(args.path, args.word);
results['signature'] = signature
if not signature:
logAndExit("Could not find signature for %s" % (args.word))
results['xrefs'] = getXrefsFor(signature);
results['callers'] = getCallGraphFor(signature);
print(json.dumps(results))<|fim▁end|> | f.seek(0); |
<|file_name|>add_vcf_to_project.py<|end_file_name|><|fim▁begin|>import os
from xbrowse_server import xbrowse_controls
from django.core.management.base import BaseCommand
from xbrowse_server.base.models import Project, Individual, VCFFile
from xbrowse_server import sample_management
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('args', nargs='*')
parser.add_argument('--indiv-id')<|fim▁hole|> def handle(self, *args, **options):
project_id = args[0]
project = Project.objects.get(project_id=project_id)
vcf_file_path = os.path.abspath(args[1])
vcf_file = VCFFile.objects.get_or_create(file_path=vcf_file_path)[0]
if options.get('clear'):
for individual in project.individual_set.all():
individual.vcf_files.clear()
if options.get('indiv_id'):
individual = Individual.objects.get(
project=project,
indiv_id=options.get('indiv_id')
)
sample_management.add_vcf_file_to_individual(individual, vcf_file)
else:
sample_management.add_vcf_file_to_project(project, vcf_file)
if options.get('load'):
print("Loading VCF into project store")
xbrowse_controls.load_project(project_id, vcf_files=[vcf_file_path])
print("Loading VCF datastore")
xbrowse_controls.load_project_datastore(project_id, vcf_files=[vcf_file_path])<|fim▁end|> | parser.add_argument('--cohort-id')
parser.add_argument('--clear', action="store_true", help="Whether to clear any previously-added VCF paths before adding this one")
parser.add_argument('--load', action="store_true", help="Whether to also load the VCF data, and not just add record its path in the meta-data tables")
|
<|file_name|>14-builder-uglify-badmodule-cmd.js<|end_file_name|><|fim▁begin|>var vows = require('vows'),
assert = require('assert'),
path = require('path'),
fs = require('fs'),
exec = require('child_process').exec,
base = path.join(__dirname, 'assets/badmodule/'),
buildBase = path.join(base, 'build'),
srcBase = path.join(base, 'src/foo'),
rimraf = require('rimraf');
var tests = {
'clean build': {
topic: function() {
rimraf(path.join(buildBase, 'foo'), this.callback);
},
'should not have build dir and': {
topic: function() {
var self = this;<|fim▁hole|> });
},
'should not have build/foo': function(foo, err) {
assert.isNotNull(err);
assert.equal(err.code, 'ENOENT');
},
'should build foo and': {
topic: function() {
var self = this,
child;
process.chdir(path.resolve(base, srcBase));
child = exec('../../../../../bin/shifter --no-global-config', function (error, stdout, stderr) {
self.callback(null, {
error: error,
stderr: stderr
});
});
},
'should fail with an error code 1': function (topic) {
assert.equal(topic.error.code, 1);
},
'should fail with an error message': function(topic) {
assert.isNotNull(topic.stderr);
}
}
}
}
};
vows.describe('building badmodule with UglifyJS via command line').addBatch(tests).export(module);<|fim▁end|> | fs.stat(path.join(buildBase, 'foo'), function(err) {
self.callback(null, err); |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
Settings and configuration for Django.
Values will be read from the module specified by the DJANGO_SETTINGS_MODULE environment
variable, and then from django.conf.global_settings; see the global settings file for
a list of all possible variables.
"""
import logging
import os
import sys
import time # Needed for Windows
import warnings
from django.conf import global_settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.functional import LazyObject, empty
from django.utils import importlib
from django.utils.module_loading import import_by_path
from django.utils import six
ENVIRONMENT_VARIABLE = "DJANGO_SETTINGS_MODULE"
class LazySettings(LazyObject):
"""
A lazy proxy for either global Django settings or a custom settings object.
The user can manually configure settings prior to using them. Otherwise,
Django uses the settings module pointed to by DJANGO_SETTINGS_MODULE.
"""
def _setup(self, name=None):
"""
Load the settings module pointed to by the environment variable. This
is used the first time we need any settings at all, if the user has not
previously configured the settings manually.
"""
try:
settings_module = os.environ[ENVIRONMENT_VARIABLE]
if not settings_module: # If it's set but is an empty string.
raise KeyError
except KeyError:
desc = ("setting %s" % name) if name else "settings"
raise ImproperlyConfigured(
"Requested %s, but settings are not configured. "
"You must either define the environment variable %s "
"or call settings.configure() before accessing settings."
% (desc, ENVIRONMENT_VARIABLE))
self._wrapped = Settings(settings_module)
self._configure_logging()
def __getattr__(self, name):
if self._wrapped is empty:
self._setup(name)
return getattr(self._wrapped, name)
def _configure_logging(self):
"""
Setup logging from LOGGING_CONFIG and LOGGING settings.
"""
if not sys.warnoptions:
try:
# Route warnings through python logging
logging.captureWarnings(True)
# Allow DeprecationWarnings through the warnings filters
warnings.simplefilter("default", DeprecationWarning)
except AttributeError:
# No captureWarnings on Python 2.6, DeprecationWarnings are on anyway
pass
if self.LOGGING_CONFIG:
from django.utils.log import DEFAULT_LOGGING
# First find the logging configuration function ...
logging_config_func = import_by_path(self.LOGGING_CONFIG)
logging_config_func(DEFAULT_LOGGING)
# ... then invoke it with the logging settings
if self.LOGGING:
logging_config_func(self.LOGGING)
def configure(self, default_settings=global_settings, **options):
"""
Called to manually configure the settings. The 'default_settings'
parameter sets where to retrieve any unspecified values from (its
argument must support attribute access (__getattr__)).
"""
if self._wrapped is not empty:
raise RuntimeError('Settings already configured.')
holder = UserSettingsHolder(default_settings)
for name, value in options.items():
setattr(holder, name, value)
self._wrapped = holder
self._configure_logging()
@property
def configured(self):
"""
Returns True if the settings have already been configured.
"""
return self._wrapped is not empty
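# Example (illustrative): standalone scripts can configure settings manually
# instead of defining DJANGO_SETTINGS_MODULE:
#
#   from django.conf import settings
#   settings.configure(DEBUG=True, SECRET_KEY='dev-only-key')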
class BaseSettings(object):
"""
Common logic for settings whether set by a module or by the user.
"""
def __setattr__(self, name, value):
if name in ("MEDIA_URL", "STATIC_URL") and value and not value.endswith('/'):
raise ImproperlyConfigured("If set, %s must end with a slash" % name)
elif name == "ALLOWED_INCLUDE_ROOTS" and isinstance(value, six.string_types):
raise ValueError("The ALLOWED_INCLUDE_ROOTS setting must be set "
"to a tuple, not a string.")
object.__setattr__(self, name, value)
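# Illustrative consequences of the checks above (a sketch):
#
#   settings.MEDIA_URL = '/media'              # ImproperlyConfigured: no trailing slash
#   settings.ALLOWED_INCLUDE_ROOTS = '/home'   # ValueError: must be a tuple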
class Settings(BaseSettings):
def __init__(self, settings_module):
# update this dict from global settings (but only for ALL_CAPS settings)
for setting in dir(global_settings):<|fim▁hole|> # store the settings module in case someone later cares
self.SETTINGS_MODULE = settings_module
try:
mod = importlib.import_module(self.SETTINGS_MODULE)
except ImportError as e:
raise ImportError(
"Could not import settings '%s' (Is it on sys.path? Is there an import error in the settings file?): %s"
% (self.SETTINGS_MODULE, e)
)
# Settings that should be converted into tuples if they're mistakenly entered
# as strings.
tuple_settings = ("INSTALLED_APPS", "TEMPLATE_DIRS")
for setting in dir(mod):
if setting == setting.upper():
setting_value = getattr(mod, setting)
if setting in tuple_settings and \
isinstance(setting_value, six.string_types):
warnings.warn("The %s setting must be a tuple. Please fix your "
"settings, as auto-correction is now deprecated." % setting,
DeprecationWarning, stacklevel=2)
setting_value = (setting_value,) # In case the user forgot the comma.
setattr(self, setting, setting_value)
if not self.SECRET_KEY:
raise ImproperlyConfigured("The SECRET_KEY setting must not be empty.")
if hasattr(time, 'tzset') and self.TIME_ZONE:
# When we can, attempt to validate the timezone. If we can't find
# this file, no check happens and it's harmless.
zoneinfo_root = '/usr/share/zoneinfo'
if (os.path.exists(zoneinfo_root) and not
os.path.exists(os.path.join(zoneinfo_root, *(self.TIME_ZONE.split('/'))))):
raise ValueError("Incorrect timezone setting: %s" % self.TIME_ZONE)
# Move the time zone info into os.environ. See ticket #2315 for why
# we don't do this unconditionally (breaks Windows).
os.environ['TZ'] = self.TIME_ZONE
time.tzset()
class UserSettingsHolder(BaseSettings):
"""
Holder for user configured settings.
"""
# SETTINGS_MODULE doesn't make much sense in the manually configured
# (standalone) case.
SETTINGS_MODULE = None
def __init__(self, default_settings):
"""
Requests for configuration variables not in this class are satisfied
from the module specified in default_settings (if possible).
"""
self.__dict__['_deleted'] = set()
self.default_settings = default_settings
def __getattr__(self, name):
if name in self._deleted:
raise AttributeError
return getattr(self.default_settings, name)
def __setattr__(self, name, value):
self._deleted.discard(name)
return super(UserSettingsHolder, self).__setattr__(name, value)
def __delattr__(self, name):
self._deleted.add(name)
return super(UserSettingsHolder, self).__delattr__(name)
def __dir__(self):
return list(self.__dict__) + dir(self.default_settings)
settings = LazySettings()<|fim▁end|> | if setting == setting.upper():
setattr(self, setting, getattr(global_settings, setting))
|
<|file_name|>TestMNITagPoints.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
# Test label reading from an MNI tag file
#
# The current directory must be writeable.
#
try:
fname = "mni-tagtest.tag"
channel = open(fname, "wb")
channel.close()
# create some random points in a sphere
#
sphere1 = vtk.vtkPointSource()
sphere1.SetNumberOfPoints(13)
xform = vtk.vtkTransform()
xform.RotateWXYZ(20, 1, 0, 0)
xformFilter = vtk.vtkTransformFilter()
xformFilter.SetTransform(xform)
xformFilter.SetInputConnection(sphere1.GetOutputPort())
labels = vtk.vtkStringArray()
labels.InsertNextValue("0")
labels.InsertNextValue("1")
labels.InsertNextValue("2")
labels.InsertNextValue("3")
labels.InsertNextValue("Halifax")
labels.InsertNextValue("Toronto")
labels.InsertNextValue("Vancouver")
labels.InsertNextValue("Larry")
labels.InsertNextValue("Bob")
labels.InsertNextValue("Jackie")
labels.InsertNextValue("10")
labels.InsertNextValue("11")
labels.InsertNextValue("12")
weights = vtk.vtkDoubleArray()
weights.InsertNextValue(1.0)
weights.InsertNextValue(1.1)
weights.InsertNextValue(1.2)
weights.InsertNextValue(1.3)
weights.InsertNextValue(1.4)
weights.InsertNextValue(1.5)
weights.InsertNextValue(1.6)
weights.InsertNextValue(1.7)
weights.InsertNextValue(1.8)
weights.InsertNextValue(1.9)
weights.InsertNextValue(0.9)
weights.InsertNextValue(0.8)
weights.InsertNextValue(0.7)
writer = vtk.vtkMNITagPointWriter()
writer.SetFileName(fname)
writer.SetInputConnection(sphere1.GetOutputPort())
writer.SetInputConnection(1, xformFilter.GetOutputPort())
writer.SetLabelText(labels)
writer.SetWeights(weights)
writer.SetComments("Volume 1: sphere points\nVolume 2: transformed points")
writer.Write()
reader = vtk.vtkMNITagPointReader()
reader.CanReadFile(fname)
reader.SetFileName(fname)
textProp = vtk.vtkTextProperty()
textProp.SetFontSize(12)
textProp.SetColor(1.0, 1.0, 0.5)
labelHier = vtk.vtkPointSetToLabelHierarchy()
labelHier.SetInputConnection(reader.GetOutputPort())
labelHier.SetTextProperty(textProp)
labelHier.SetLabelArrayName("LabelText")
labelHier.SetMaximumDepth(15)
labelHier.SetTargetLabelCount(12)
labelMapper = vtk.vtkLabelPlacementMapper()
labelMapper.SetInputConnection(labelHier.GetOutputPort())
labelMapper.UseDepthBufferOff()
labelMapper.SetShapeToRect()
labelMapper.SetStyleToOutline()
labelActor = vtk.vtkActor2D()
labelActor.SetMapper(labelMapper)
glyphSource = vtk.vtkSphereSource()
glyphSource.SetRadius(0.01)
glyph = vtk.vtkGlyph3D()
glyph.SetSourceConnection(glyphSource.GetOutputPort())
glyph.SetInputConnection(reader.GetOutputPort())
mapper = vtk.vtkDataSetMapper()
mapper.SetInputConnection(glyph.GetOutputPort())
actor = vtk.vtkActor()
actor.SetMapper(mapper)
# Create rendering stuff
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
<|fim▁hole|> ren1.SetBackground(0, 0, 0)
renWin.SetSize(300, 300)
renWin.Render()
try:
os.remove(fname)
except OSError:
pass
# render the image
#
# iren.Start()
except IOError:
print "Unable to test the writer/reader."<|fim▁end|> | # Add the actors to the renderer, set the background and size
#
ren1.AddViewProp(actor)
ren1.AddViewProp(labelActor)
|
<|file_name|>test_shell_interactive.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding=utf-8
# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import pexpect
import pytest
import shlex
import shutil
import socket
import signal
from impala_shell_results import get_shell_cmd_result, cancellation_helper
from subprocess import Popen, PIPE
from tests.common.impala_service import ImpaladService
from tests.verifiers.metric_verifier import MetricVerifier
from time import sleep
SHELL_CMD = "%s/bin/impala-shell.sh" % os.environ['IMPALA_HOME']
SHELL_HISTORY_FILE = os.path.expanduser("~/.impalahistory")
TMP_HISTORY_FILE = os.path.expanduser("~/.impalahistorytmp")
class TestImpalaShellInteractive(object):
"""Test the impala shell interactively"""
def _send_cmd_to_shell(self, p, cmd):
"""Given an open shell process, write a cmd to stdin
This method takes care of adding the delimiter and EOL, callers should send the raw
command.
"""
p.stdin.write("%s;\n" % cmd)
p.stdin.flush()
def _start_new_shell_process(self, args=None):
"""Starts a shell process and returns the process handle"""
cmd = "%s %s" % (SHELL_CMD, args) if args else SHELL_CMD
return Popen(shlex.split(SHELL_CMD), shell=True, stdout=PIPE,
stdin=PIPE, stderr=PIPE)
@classmethod
def setup_class(cls):
if os.path.exists(SHELL_HISTORY_FILE):
shutil.move(SHELL_HISTORY_FILE, TMP_HISTORY_FILE)
@classmethod
def teardown_class(cls):
if os.path.exists(TMP_HISTORY_FILE): shutil.move(TMP_HISTORY_FILE, SHELL_HISTORY_FILE)
@pytest.mark.execute_serially
def test_escaped_quotes(self):
"""Test escaping quotes"""
# test escaped quotes outside of quotes
result = run_impala_shell_interactive("select \\'bc';")
assert "could not match input" in result.stderr
result = run_impala_shell_interactive("select \\\"bc\";")
assert "could not match input" in result.stderr
# test escaped quotes within quotes
result = run_impala_shell_interactive("select 'ab\\'c';")
assert "Fetched 1 row(s)" in result.stderr
result = run_impala_shell_interactive("select \"ab\\\"c\";")
assert "Fetched 1 row(s)" in result.stderr
@pytest.mark.execute_serially
def test_cancellation(self):
impalad = ImpaladService(socket.getfqdn())
impalad.wait_for_num_in_flight_queries(0)
command = "select sleep(10000);"
p = self._start_new_shell_process()
self._send_cmd_to_shell(p, command)
sleep(1)
# iterate through all processes with psutil
shell_pid = cancellation_helper()
sleep(2)
os.kill(shell_pid, signal.SIGINT)
result = get_shell_cmd_result(p)
assert impalad.wait_for_num_in_flight_queries(0)
@pytest.mark.execute_serially
def test_unicode_input(self):
"Test queries containing non-ascii input"
# test a unicode query spanning multiple lines
unicode_text = u'\ufffd'
args = "select '%s'\n;" % unicode_text.encode('utf-8')
result = run_impala_shell_interactive(args)
assert "Fetched 1 row(s)" in result.stderr
@pytest.mark.execute_serially
def test_welcome_string(self):
"""Test that the shell's welcome message is only printed once
when the shell is started. Ensure it is not reprinted on errors.
Regression test for IMPALA-1153
"""
result = run_impala_shell_interactive('asdf;')
assert result.stdout.count("Welcome to the Impala shell") == 1
result = run_impala_shell_interactive('select * from non_existent_table;')
assert result.stdout.count("Welcome to the Impala shell") == 1
@pytest.mark.execute_serially
def test_bash_cmd_timing(self):
"""Test existence of time output in bash commands run from shell"""
args = "! ls;"
result = run_impala_shell_interactive(args)
assert "Executed in" in result.stderr
@pytest.mark.execute_serially
def test_reconnect(self):
"""Regression Test for IMPALA-1235
Verifies that a connect command by the user is honoured.
"""
def get_num_open_sessions(impala_service):
"""Helper method to retrieve the number of open sessions"""
return impala_service.get_metric_value('impala-server.num-open-beeswax-sessions')
hostname = socket.getfqdn()
initial_impala_service = ImpaladService(hostname)
target_impala_service = ImpaladService(hostname, webserver_port=25001,
beeswax_port=21001, be_port=22001)
# Get the initial state for the number of sessions.
num_sessions_initial = get_num_open_sessions(initial_impala_service)
num_sessions_target = get_num_open_sessions(target_impala_service)
# Connect to localhost:21000 (default)
p = self._start_new_shell_process()
sleep(2)
# Make sure we're connected <hostname>:21000
assert get_num_open_sessions(initial_impala_service) == num_sessions_initial + 1, \
"Not connected to %s:21000" % hostname
self._send_cmd_to_shell(p, "connect %s:21001" % hostname)
# Wait for a little while
sleep(2)
# The number of sessions on the target impalad should have been incremented.
assert get_num_open_sessions(target_impala_service) == num_sessions_target + 1, \
"Not connected to %s:21001" % hostname
# The number of sessions on the initial impalad should have been decremented.
assert get_num_open_sessions(initial_impala_service) == num_sessions_initial, \
"Connection to %s:21000 should have been closed" % hostname
@pytest.mark.execute_serially
def test_ddl_queries_are_closed(self):
"""Regression test for IMPALA-1317
The shell does not call close() for alter, use and drop queries, leaving them in
flight. This test issues those queries in interactive mode, and checks the debug
webpage to confirm that they've been closed.
TODO: Add every statement type.
"""
TMP_DB = 'inflight_test_db'
TMP_TBL = 'tmp_tbl'
MSG = '%s query should be closed'
NUM_QUERIES = 'impala-server.num-queries'
impalad = ImpaladService(socket.getfqdn())
p = self._start_new_shell_process()
try:
start_num_queries = impalad.get_metric_value(NUM_QUERIES)
self._send_cmd_to_shell(p, 'create database if not exists %s' % TMP_DB)
self._send_cmd_to_shell(p, 'use %s' % TMP_DB)
impalad.wait_for_metric_value(NUM_QUERIES, start_num_queries + 2)
assert impalad.wait_for_num_in_flight_queries(0), MSG % 'use'
self._send_cmd_to_shell(p, 'create table %s(i int)' % TMP_TBL)
self._send_cmd_to_shell(p, 'alter table %s add columns (j int)' % TMP_TBL)
impalad.wait_for_metric_value(NUM_QUERIES, start_num_queries + 4)
assert impalad.wait_for_num_in_flight_queries(0), MSG % 'alter'
self._send_cmd_to_shell(p, 'drop table %s' % TMP_TBL)
impalad.wait_for_metric_value(NUM_QUERIES, start_num_queries + 5)
assert impalad.wait_for_num_in_flight_queries(0), MSG % 'drop'
finally:
run_impala_shell_interactive("drop table if exists %s.%s;" % (TMP_DB, TMP_TBL))
run_impala_shell_interactive("drop database if exists foo;")
@pytest.mark.execute_serially
def test_multiline_queries_in_history(self):
"""Test to ensure that multiline queries with comments are preserved in history
Ensure that multiline queries are preserved when they're read back from history.
Additionally, also test that comments are preserved.
"""
# regex for pexpect, a shell prompt is expected after each command..
prompt_regex = '.*%s:2100.*' % socket.getfqdn()
# readline gets its input from tty, so using stdin does not work.
child_proc = pexpect.spawn(SHELL_CMD)<|fim▁hole|> "select /*comment*/\n1;",
"select\n/*comm\nent*/\n1;"]
for query in queries:
child_proc.expect(prompt_regex)
child_proc.sendline(query)
child_proc.expect(prompt_regex)
child_proc.sendline('quit;')
p = self._start_new_shell_process()
self._send_cmd_to_shell(p, 'history')
result = get_shell_cmd_result(p)
for query in queries:
assert query in result.stderr, "'%s' not in '%s'" % (query, result.stderr)
def run_impala_shell_interactive(command, shell_args=''):
"""Runs a command in the Impala shell interactively."""
cmd = "%s %s" % (SHELL_CMD, shell_args)
# workaround to make Popen environment 'utf-8' compatible
# since piping defaults to ascii
my_env = os.environ
my_env['PYTHONIOENCODING'] = 'utf-8'
  # Pass the argument list directly; shell=True with a list would ignore the args.
  p = Popen(shlex.split(cmd), stdout=PIPE,
            stdin=PIPE, stderr=PIPE, env=my_env)
p.stdin.write(command + "\n")
p.stdin.flush()
return get_shell_cmd_result(p)<|fim▁end|> | queries = ["select\n1--comment;", |
<|file_name|>sort.js<|end_file_name|><|fim▁begin|>if(App.namespace) { App.namespace('Action.Sort', function(App) {
/**
* @namespace App.Action.Sort
* @type {*}
*/
var sort = {};
/** @type {App.Action.Project} Project */
var Project = null;
/**
     * buttons for sorting the grid columns
* @type {{}}
*/
sort.icoSort = {};
/**
     * buttons for filtering the grid columns
* @type {{}}
*/
sort.icoFilter = {};
/**
*
* @type {{}}
*/
sort.dataGroupsusers = {};
/**
*
* @type {{taskGroup: Array, taskName: Array, resGroup: Array, resUsers: Array}}
*/
sort.dynamic = {taskGroup:[],taskName:[],resGroup:[],resUsers:[]};
sort.clearFilter = true;
sort.startFilteringReady = true;
/**
*
* @namespace App.Action.Sort.init
*/
sort.init = function(){
Project = App.Action.Project;
gantt.attachEvent("onColumnResizeEnd", sort.onEventGridResizeEnd);
gantt.attachEvent("onGridResizeEnd", sort.onEventGridResizeEnd);
gantt.attachEvent("onBeforeTaskDisplay", onBeforeTaskDisplay);
sort.dataGroupsusers = App.Module.DataStore.get('groupsusers');
sort.icoSort = {
id: App.query('#ganttsort_id'),
task: App.query('#ganttsort_task'),
start: App.query('#ganttsort_start'),
resource: App.query('#ganttsort_resource')
};
sort.icoFilter = {
task: App.query('#ganttfilter_task'),
resource: App.query('#ganttfilter_resource')
};
sort.icoSort.id.direction = false;
sort.icoSort.id.addEventListener('click', sort.onSortById, false);
sort.icoSort.task.direction = false;
sort.icoSort.task.addEventListener('click', sort.onSortByTask, false);
sort.icoSort.start.direction = false;
sort.icoSort.start.addEventListener('click', sort.onSortByStart, false);
sort.icoSort.resource.direction = false;
sort.icoSort.resource.addEventListener('click', sort.onSortByResource, false);
sort.icoFilter.task.addEventListener('click', sort.onFilterForTask, false);
sort.icoFilter.resource.addEventListener('click', sort.onFilterForResource, false);
sort.applyStyle();
};
sort.applyStyle = function(){
App.node('sortedfilters').style.display = 'block';
sort.icoSort.id.style.left = '5px';
sort.icoSort.task.style.left = '87px';
sort.icoSort.start.style.left = '220px';
sort.icoSort.resource.style.left = '455px';
sort.icoFilter.task.style.left = '107px';
sort.icoFilter.resource.style.left = '475px';
};
/**
* change icons position
* @namespace App.Action.Sort.onEventGridResizeEnd
*/
sort.onEventGridResizeEnd = function () {
setTimeout(function(){
sort.icoSort.id.style.left = sort.getColumnPosition('id') + 'px';
sort.icoSort.task.style.left = sort.getColumnPosition('text') + 'px';
sort.icoSort.start.style.left = sort.getColumnPosition('start_date') + 'px';
sort.icoSort.resource.style.left = sort.getColumnPosition('users') + 'px';
sort.icoFilter.task.style.left = sort.getColumnPosition('text') + 20 + 'px';
sort.icoFilter.resource.style.left = sort.getColumnPosition('users') + 20 + 'px';
}, 600);
};
/**
* @namespace App.Action.Sort.getColumnPosition
* @param column_id
* @returns {*}
*/
sort.getColumnPosition = function(column_id) {
var selector = 'div[column_id='+column_id+']';
return ($(selector).width() / 2 + $(selector).position().left) - 15
};
/**
* Sorted Event By Id
* @param event
*/
sort.onSortById = function(event){
sort.icoSort.id.direction = !sort.icoSort.id.direction;
gantt.sort(sortById);
};
function sortById(task1, task2){
task1 = parseInt(task1.id);
task2 = parseInt(task2.id);
if (sort.icoSort.id.direction){
return task1 > task2 ? 1 : (task1 < task2 ? -1 : 0);
} else {
return task1 > task2 ? -1 : (task1 < task2 ? 1 : 0);
}
}
/**
* Sorted Event By Task
* @param event
*/
sort.onSortByTask = function(event){
sort.icoSort.task.direction = !sort.icoSort.task.direction;
gantt.sort(sortByTask);
};
function sortByTask(task1, task2){
task1 = task1.text;
task2 = task2.text;
if (sort.icoSort.task.direction){
return task1 > task2 ? 1 : (task1 < task2 ? -1 : 0);
} else {
return task1 > task2 ? -1 : (task1 < task2 ? 1 : 0);
}
}
/**
* Sorted Event By Start
* @param event
*/
<|fim▁hole|> };
function sortByStart(task1, task2){
task1 = task1.start_date;
task2 = task2.start_date;
if (sort.icoSort.start.direction) {
return task1 > task2 ? 1 : (task1 < task2 ? -1 : 0);
} else {
return task1 > task2 ? -1 : (task1 < task2 ? 1 : 0);
}
}
/**
* Sorted Event By Resource
* @param event
*/
sort.onSortByResource = function(event){
sort.icoSort.resource.direction = !sort.icoSort.resource.direction;
gantt.sort(sortByResource);
};
function sortByResource(task1, task2){
task1 = task1.users;
task2 = task2.users;
if (sort.icoSort.resource.direction){
return task1 > task2 ? 1 : (task1 < task2 ? -1 : 0);
} else {
return task1 > task2 ? -1 : (task1 < task2 ? 1 : 0);
}
}
sort.createPopup = function(content, specialClass){
var popup = document.createElement('div'),
icoClose = document.createElement('i');
icoClose.className = 'icon-close ocb_close_ico';
icoClose.onclick = function(e){ $(popup).remove() };
popup.className = 'ocb_popup' + (specialClass?' '+specialClass:'');
if(typeof content === 'object') popup.appendChild(content);
else popup.innerHTML = content;
popup.appendChild(icoClose);
return popup;
};
function filterTaskView(){
var wrap = Util.createElement( 'div', null, '<p><b>' +App.t('Filter by task groups or tasks')+ '</b><span class="ico_clear clear_filter"></span></p>');
var inputNameValue = sort.memory('taskname-task');
var inputName = Util.createElement( 'input', {
'id': 'gantt_filter_name',
'type': 'text',
'placeholder': App.t('Enter passphrase to be part of task name'),
'value': ''
} );
if(inputNameValue && inputNameValue.length > 0)
inputName.value = inputNameValue;
inputName.addEventListener('keyup', onFilterClickTask);
var inputGroupValue = sort.memory('taskname-group');
var inputGroup = Util.createElement( 'input', {
'id': 'gantt_filter_group',
'type': 'text',
'placeholder': App.t('Enter passphrase to be part of group name'),
'value': ''
} );
if(inputGroupValue && inputGroupValue.length > 0)
inputGroup.value = inputGroupValue;
inputGroup.addEventListener('keyup', onFilterClickTask);
var clearBtn, clearFields = Util.createElement( 'div', {'class':'ico_clear'});
wrap.appendChild(inputName);
wrap.appendChild(inputGroup);
wrap.appendChild(clearFields);
if(clearBtn = wrap.querySelector('.clear_filter')) {
clearBtn.addEventListener('click',function(event){
inputName.value = inputGroup.value = '';
sort.clearFilter = true;
sort.startFilteringReady = true;
gantt.render();
});
}
return wrap;
}
function filterGroupView(){
var dataGroupsusers = sort.dataGroupsusers;
var clearBtn, inner = Util.createElement('p', {}, '<p><b>' +App.t('Filter by task groups or resource')+ '</b><span class="ico_clear clear_filter"></span></p>');
for(var groupName in dataGroupsusers){
var fragment = createUsersGroup(groupName, dataGroupsusers[groupName]);
inner.appendChild(fragment);
}
if(clearBtn = inner.querySelector('.clear_filter')) {
clearBtn.addEventListener('click',function(event){
/*var i, inputs = inner.querySelectorAll('input[type=checkbox]');
if(typeof inputs === 'object' && inputs.length > 0) {
for( i = 0; i < inputs.length; i++ ){
if(inputs[i].checked === true) inputs[i].checked = false;
}
} */
sort.inputCheckedAll(inner, false);
sort.clearFilter = true;
sort.startFilteringReady = true;
gantt.render();
});
}
return inner
}
/**
* @namespace App.Action.Sort.createInputWrapper
* @type {createInputWrapper}
*/
sort.createInputWrapper = createInputWrapper;
/**
* @namespace App.Action.Sort.createUsersGroup
* @type {createUsersGroup}
*/
sort.createUsersGroup = createUsersGroup;
function createUsersGroup(group, users){
var deprecatedUsers = ['collab_user'];
var usersElements = document.createElement('div'),
oneElement = document.createDocumentFragment();
oneElement.appendChild(createInputWrapper(false, group));
for(var i = 0; i < users.length; i ++) {
// hide deprecated users
if (deprecatedUsers.indexOf(users[i]['uid']) !== -1) continue;
usersElements.appendChild(createInputWrapper(users[i]['uid'], group))
}
oneElement.appendChild(usersElements);
return oneElement
}
function createInputWrapper(user, group) {
var attr_id = user ? 'user_' + group + '_' + user : 'group_' + group;
var attr_gid = group;
var attr_type = user ? 'user' : 'group';
var attr_name = user ? user : group;
var is_checked = sort.memory('resource-' + attr_type + '-' + attr_name) ? true : false;
var wrap = Util.createElement( user ? 'span' : 'div' );
var input = Util.createElement( 'input', {
'id': attr_id,
'name': attr_name,
'type': 'checkbox',
'class': '',
'data-gid': attr_gid,
'data-type': attr_type
});
if(is_checked)
input.checked = true;
input.addEventListener('click', onFilterClickResource);
var label = Util.createElement( 'label', {'for':attr_id},'<span></span>'+ (attr_type == 'user' ? attr_name : '<b>'+attr_name+'</b>' ));
wrap.appendChild(input);
wrap.appendChild(label);
return wrap;
}
function onFilterClickResource (event) {
var id = this.id.split('_')[1];
var name = this.getAttribute('name');
var type = this.getAttribute('data-type');
var group = this.getAttribute('data-gid');
var checked = this.checked;
var uids = sort.getUsersIdsByGroup(name);
//console.log(id, name, checked, type, group, uids);
sort.memory('resource-' + type + '-' + name, checked);
if(type === 'user') {
if (checked && sort.dynamic.resUsers.indexOf(name) === -1) {
sort.dynamic.resUsers.push(name);
//jQuery('input[name="'+name+'"]').checked(true);
jQuery('input[name="'+name+'"][data-type="user"]').prop('checked', true);
//console.log();
} else if (!checked && sort.dynamic.resUsers.indexOf(name) !== -1) {
sort.dynamic.resUsers = Util.rmItArr(name, sort.dynamic.resUsers);
}
} else {
//console.log(group);
//console.log(sort.dataGroupsusers);
if(checked && sort.dataGroupsusers[group]) {
sort.dynamic.resGroup.push(group);
//sort.dynamic.resUsers = Util.arrMerge(sort.dynamic.resUsers, uids);
}
else if(!checked && sort.dynamic.resGroup.indexOf(name) !== -1) {
sort.dynamic.resGroup = Util.rmItArr(group, sort.dynamic.resGroup);
//sort.dynamic.resUsers = Util.arrDiff(sort.dynamic.resUsers, uids);
}
            // todo: toggle the user checkboxes on/off
//sort.inputCheckedAll(this.parentNode.nextSibling, checked);
/*
if(checked && sort.dynamic.resGroup.indexOf(name) === -1) {
sort.dynamic.resGroup.push(name);
sort.dynamic.resUsers = Util.arrMerge(sort.dynamic.resUsers, uids);
}
else if(!checked && sort.dynamic.resGroup.indexOf(name) !== -1) {
sort.dynamic.resGroup = Util.rmItArr(name, sort.dynamic.resGroup);
sort.dynamic.resUsers = Util.arrDiff(sort.dynamic.resUsers, uids);
}*/
}
// handler for filtering
if(sort.startFilteringReady){
sort.startFilteringReady = false;
Timer.after(1000, sort.startFiltering);
}
}
function onFilterClickTask(event){
var type = this.id == 'gantt_filter_name' ? 'task' : 'group';
var value = this.value;
sort.memory('taskname-' + type, value);
if(type === 'task')
sort.dynamic.taskName[0] = value;
else
sort.dynamic.taskGroup[0] = value;
// handler for filtering
if(sort.startFilteringReady){
sort.startFilteringReady = false;
Timer.after(1000, sort.startFiltering);
}
}
sort.onFilterForTask = function(event){
var popup = sort.createPopup(filterTaskView(), 'filter_tasks');
popup.style.width = '350px';
popup.style.left = '110px';
App.node('topbar').appendChild(popup);
};
sort.onFilterForResource = function(event){
var popup = sort.createPopup(filterGroupView(), 'filter_resources');
popup.style.width = '500px';
popup.style.left = '480px';
App.node('topbar').appendChild(popup);
//console.log(event);
};
/**
* Apply filtering
*/
sort.startFiltering = function(){
sort.startFilteringReady = true;
if( !!sort.dynamic.taskName[0] ||
!!sort.dynamic.taskGroup[0] ||
!Util.isEmpty(sort.dynamic.resUsers) ||
!Util.isEmpty(sort.dynamic.resGroup)
) {
console.log('Filtering enabled');
sort.clearFilter = false;
gantt.refreshData();
}else{
console.log('Filtering disabled');
sort.clearFilter = true;
gantt.refreshData();
}
};
function onBeforeTaskDisplay(id, task) {
if(!sort.clearFilter) {
var taskName = sort.dynamic.taskName[0] ? sort.dynamic.taskName[0].toLowerCase() : false;
var taskGroup = sort.dynamic.taskGroup[0] ? sort.dynamic.taskGroup[0].toLowerCase() : false;
var resUsers = Util.uniqueArr(sort.dynamic.resUsers);
var resGroup = sort.dynamic.resGroup;
var show = false;
var resources = false;
if(!!taskName && gantt.getChildren(id).length == 0 && task.text.toLowerCase().indexOf(taskName) !== -1 ) {
show = true;
}
if(!!taskGroup && gantt.getChildren(id).length > 0 && task.text.toLowerCase().indexOf(taskGroup) !== -1 ) {
show = true;
}
if(!!resUsers) {
for(var iu=0; iu < resUsers.length; iu ++){
resources = App.Action.Chart.getTaskResources(id);
if(resources.users.indexOf(resUsers[iu]) !== -1) {
show = true;
break;
}
}
}
if(!!resGroup) {
for(var ig=0; ig < resGroup.length; ig ++){
resources = App.Action.Chart.getTaskResources(id);
if(resources.groups.indexOf(resGroup[ig]) !== -1) {
show = true;
break;
}
}
}
return show;
}else
return true;
}
/**
* @namespace App.Action.Sort.getUsersIdsByGroup
* @param gid
* @returns {Array}
*/
sort.getUsersIdsByGroup = function(gid){
var ids = [];
var groupsusers = Util.isArr(sort.dataGroupsusers[gid]) ? sort.dataGroupsusers[gid] : [];
for(var i = 0; i < groupsusers.length; i ++ ){
ids.push(groupsusers[i]['uid'])
}
return ids;
};
sort._memoryStore = {};
/**
* @namespace App.Action.Sort.memory
* @param key
* @param value
* @returns {*}
*/
sort.memory = function(key, value){
if(key === undefined && value === undefined)
return sort._memoryStore;
if(value === undefined)
return sort._memoryStore[key]
else
return sort._memoryStore[key] = value
};
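    /**
     * Illustrative usage (a sketch):
     *   sort.memory('taskname-task', 'deploy'); // set a value
     *   sort.memory('taskname-task');           // get it back -> 'deploy'
     *   sort.memory();                          // the whole store
     */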
/**
* @namespace App.Action.Sort.inputCheckedAll
* @param nodeWhere
* @param checked
*/
sort.inputCheckedAll = function(nodeWhere, checked){
var i, inputs = nodeWhere.querySelectorAll('input[type=checkbox]');
if(typeof inputs === 'object' && inputs.length > 0) {
for( i = 0; i < inputs.length; i++ ){
inputs[i].checked = !!checked;
/*if(!!checked)
if(inputs[i].checked !== true) inputs[i].checked = true;
else
if(inputs[i].checked === true) inputs[i].checked = false;*/
}
}
};
return sort
})}<|fim▁end|> | sort.onSortByStart = function(event){
sort.icoSort.start.direction = !sort.icoSort.start.direction;
gantt.sort(sortByStart);
|
<|file_name|>lib_mk.py<|end_file_name|><|fim▁begin|># See utils/checkpackagelib/readme.txt before editing this file.
# There are already dependency checks during the build, so the check functions
# below don't need to test for things that are already verified by exploring
# the menu options using "make menuconfig" and by running "make" with the
# appropriate packages enabled.
import re
from checkpackagelib.base import _CheckFunction
from checkpackagelib.lib import ConsecutiveEmptyLines # noqa: F401
from checkpackagelib.lib import EmptyLastLine # noqa: F401
from checkpackagelib.lib import NewlineAtEof # noqa: F401
from checkpackagelib.lib import TrailingSpace # noqa: F401
# used in more than one check
start_conditional = ["ifdef", "ifeq", "ifndef", "ifneq"]
end_conditional = ["endif"]
class Indent(_CheckFunction):
COMMENT = re.compile("^\s*#")
CONDITIONAL = re.compile("^\s*({})\s".format("|".join(start_conditional + end_conditional)))
ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
END_DEFINE = re.compile("^\s*endef\s")
MAKEFILE_TARGET = re.compile("^[^# \t]+:\s")
START_DEFINE = re.compile("^\s*define\s")
def before(self):
self.define = False
self.backslash = False
self.makefile_target = False
def check_line(self, lineno, text):
if self.START_DEFINE.search(text):
self.define = True
return
if self.END_DEFINE.search(text):
self.define = False
return
expect_tabs = False
if self.define or self.backslash or self.makefile_target:
expect_tabs = True
if self.CONDITIONAL.search(text):
expect_tabs = False
# calculate for next line
if self.ENDS_WITH_BACKSLASH.search(text):
self.backslash = True
else:
self.backslash = False
if self.MAKEFILE_TARGET.search(text):
self.makefile_target = True
return
if text.strip() == "":
self.makefile_target = False
return
# comment can be indented or not inside define ... endef, so ignore it
if self.define and self.COMMENT.search(text):
return
if expect_tabs:
if not text.startswith("\t"):
return ["{}:{}: expected indent with tabs"
.format(self.filename, lineno),
text]
else:
if text.startswith("\t"):
return ["{}:{}: unexpected indent with tabs"
.format(self.filename, lineno),
text]
class OverriddenVariable(_CheckFunction):
    CONCATENATING = re.compile(r"^([A-Z0-9_]+)\s*(\+|:|)=\s*\$\(\1\)")
    END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
    OVERRIDING_ASSIGNMENTS = [':=', "="]
    START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))
    VARIABLE = re.compile(r"^([A-Z0-9_]+)\s*((\+|:|)=)")
    USUALLY_OVERRIDDEN = re.compile(r"^[A-Z0-9_]+({})".format("|".join([
        r"_ARCH\s*=\s*",
        r"_CPU\s*=\s*",
        r"_SITE\s*=\s*",
        r"_SOURCE\s*=\s*",
"_VERSION\s*=\s*"])))<|fim▁hole|> self.unconditionally_set = []
self.conditionally_set = []
def check_line(self, lineno, text):
if self.START_CONDITIONAL.search(text):
self.conditional += 1
return
if self.END_CONDITIONAL.search(text):
self.conditional -= 1
return
m = self.VARIABLE.search(text)
if m is None:
return
variable, assignment = m.group(1, 2)
if self.conditional == 0:
if variable in self.conditionally_set:
self.unconditionally_set.append(variable)
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {} previously conditionally set"
.format(self.filename, lineno, variable),
text]
if variable not in self.unconditionally_set:
self.unconditionally_set.append(variable)
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: unconditional override of variable {}"
.format(self.filename, lineno, variable),
text]
else:
if variable not in self.unconditionally_set:
self.conditionally_set.append(variable)
return
if self.CONCATENATING.search(text):
return
if self.USUALLY_OVERRIDDEN.search(text):
return
if assignment in self.OVERRIDING_ASSIGNMENTS:
return ["{}:{}: conditional override of variable {}"
.format(self.filename, lineno, variable),
text]
class PackageHeader(_CheckFunction):
def before(self):
self.skip = False
def check_line(self, lineno, text):
if self.skip or lineno > 6:
return
if lineno in [1, 5]:
if lineno == 1 and text.startswith("include "):
self.skip = True
return
if text.rstrip() != "#" * 80:
return ["{}:{}: should be 80 hashes ({}#writing-rules-mk)"
.format(self.filename, lineno, self.url_to_manual),
text,
"#" * 80]
elif lineno in [2, 4]:
if text.rstrip() != "#":
return ["{}:{}: should be 1 hash ({}#writing-rules-mk)"
.format(self.filename, lineno, self.url_to_manual),
text]
elif lineno == 6:
if text.rstrip() != "":
return ["{}:{}: should be a blank line ({}#writing-rules-mk)"
.format(self.filename, lineno, self.url_to_manual),
text]
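# For reference, a sketch of the header layout PackageHeader expects in lines 1-6
# of a package .mk file (line 3, usually the package name, is not checked):
#   1: 80 '#' characters    2: '#'    3: (free text)    4: '#'
#   5: 80 '#' characters    6: blank line
# A first line starting with "include " skips the check entirely.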
class RemoveDefaultPackageSourceVariable(_CheckFunction):
packages_that_may_contain_default_source = ["binutils", "gcc", "gdb"]
    PACKAGE_NAME = re.compile(r"/([^/]+)\.mk")
def before(self):
package = self.PACKAGE_NAME.search(self.filename).group(1)
package_upper = package.replace("-", "_").upper()
self.package = package
self.FIND_SOURCE = re.compile(
"^{}_SOURCE\s*=\s*{}-\$\({}_VERSION\)\.tar\.gz"
.format(package_upper, package, package_upper))
def check_line(self, lineno, text):
if self.FIND_SOURCE.search(text):
if self.package in self.packages_that_may_contain_default_source:
return
return ["{}:{}: remove default value of _SOURCE variable "
"({}#generic-package-reference)"
.format(self.filename, lineno, self.url_to_manual),
text]
class SpaceBeforeBackslash(_CheckFunction):
TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH = re.compile(r"^.*( |\t ?)\\$")
def check_line(self, lineno, text):
if self.TAB_OR_MULTIPLE_SPACES_BEFORE_BACKSLASH.match(text.rstrip()):
return ["{}:{}: use only one space before backslash"
.format(self.filename, lineno),
text]
class TrailingBackslash(_CheckFunction):
ENDS_WITH_BACKSLASH = re.compile(r"^[^#].*\\$")
def before(self):
self.backslash = False
def check_line(self, lineno, text):
last_line_ends_in_backslash = self.backslash
# calculate for next line
if self.ENDS_WITH_BACKSLASH.search(text):
self.backslash = True
self.lastline = text
return
self.backslash = False
if last_line_ends_in_backslash and text.strip() == "":
return ["{}:{}: remove trailing backslash"
.format(self.filename, lineno - 1),
self.lastline]
class TypoInPackageVariable(_CheckFunction):
ALLOWED = re.compile("|".join([
"ACLOCAL_DIR",
"ACLOCAL_HOST_DIR",
"BR_CCACHE_INITIAL_SETUP",
"BR_LIBC",
"BR_NO_CHECK_HASH_FOR",
"LINUX_EXTENSIONS",
"LINUX_POST_PATCH_HOOKS",
"LINUX_TOOLS",
"LUA_RUN",
"MKFS_JFFS2",
"MKIMAGE_ARCH",
"PACKAGES_PERMISSIONS_TABLE",
"PKG_CONFIG_HOST_BINARY",
"SUMTOOL",
"TARGET_FINALIZE_HOOKS",
"TARGETS_ROOTFS",
"XTENSA_CORE_NAME"]))
    PACKAGE_NAME = re.compile(r"/([^/]+)\.mk")
    VARIABLE = re.compile(r"^([A-Z0-9_]+_[A-Z0-9_]+)\s*(\+|)=")
def before(self):
package = self.PACKAGE_NAME.search(self.filename).group(1)
package = package.replace("-", "_").upper()
# linux tools do not use LINUX_TOOL_ prefix for variables
package = package.replace("LINUX_TOOL_", "")
# linux extensions do not use LINUX_EXT_ prefix for variables
package = package.replace("LINUX_EXT_", "")
self.package = package
        self.REGEX = re.compile(r"^(HOST_|ROOTFS_)?({}_[A-Z0-9_]+)".format(package))
        self.FIND_VIRTUAL = re.compile(
            r"^{}_PROVIDES\s*(\+|)=\s*(.*)".format(package))
self.virtual = []
def check_line(self, lineno, text):
m = self.VARIABLE.search(text)
if m is None:
return
variable = m.group(1)
# allow to set variables for virtual package this package provides
v = self.FIND_VIRTUAL.search(text)
if v:
self.virtual += v.group(2).upper().split()
return
for virtual in self.virtual:
if variable.startswith("{}_".format(virtual)):
return
if self.ALLOWED.match(variable):
return
if self.REGEX.search(text) is None:
return ["{}:{}: possible typo: {} -> *{}*"
.format(self.filename, lineno, variable, self.package),
text]
class UselessFlag(_CheckFunction):
    DEFAULT_AUTOTOOLS_FLAG = re.compile("^.*{}".format("|".join([
        r"_AUTORECONF\s*=\s*NO",
        r"_LIBTOOL_PATCH\s*=\s*YES"])))
    DEFAULT_GENERIC_FLAG = re.compile("^.*{}".format("|".join([
        r"_INSTALL_IMAGES\s*=\s*NO",
        r"_INSTALL_REDISTRIBUTE\s*=\s*YES",
        r"_INSTALL_STAGING\s*=\s*NO",
        r"_INSTALL_TARGET\s*=\s*YES"])))
    END_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(end_conditional)))
    START_CONDITIONAL = re.compile(r"^\s*({})".format("|".join(start_conditional)))
def before(self):
self.conditional = 0
def check_line(self, lineno, text):
if self.START_CONDITIONAL.search(text):
self.conditional += 1
return
if self.END_CONDITIONAL.search(text):
self.conditional -= 1
return
# allow non-default conditionally overridden by default
if self.conditional > 0:
return
if self.DEFAULT_GENERIC_FLAG.search(text):
return ["{}:{}: useless default value ({}#"
"_infrastructure_for_packages_with_specific_build_systems)"
.format(self.filename, lineno, self.url_to_manual),
text]
if self.DEFAULT_AUTOTOOLS_FLAG.search(text) and not text.lstrip().startswith("HOST_"):
return ["{}:{}: useless default value "
"({}#_infrastructure_for_autotools_based_packages)"
.format(self.filename, lineno, self.url_to_manual),
text]
class VariableWithBraces(_CheckFunction):
VARIABLE_WITH_BRACES = re.compile(r"^[^#].*[^$]\${\w+}")
def check_line(self, lineno, text):
if self.VARIABLE_WITH_BRACES.match(text.rstrip()):
return ["{}:{}: use $() to delimit variables, not ${{}}"
.format(self.filename, lineno),
text]<|fim▁end|> |
def before(self):
self.conditional = 0 |
<|file_name|>test-hub-shutdown.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2014, Andre Caron ([email protected])
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#include "comfort-zone.hpp"
namespace {
/// @test Verify that shutting down the hub causes tasks to complete.
/// @return Non-zero on test failure.
int test_hub_shutdown ()
{
class TestTask
: public cz::Task
{
/* data. */
private:
int myLimit;
int myCount;
/* construction. */
public:
TestTask (int limit)
: myLimit(limit)
, myCount(0)
{}<|fim▁hole|> /* overrides. */
public:
virtual void run ()
{
try {
while (++myCount <= myLimit) {
std::cout << "iteration #" << myCount << std::endl;
pause();
}
}
catch (...) {
//std::cout << "task shutting down?" << std::endl;
//pause();
}
std::cout << "task completed normally." << std::endl;
}
};
// Star the task.
cz::Hub hub;
// Check that the task can complete by itself.
TestTask task1(5);
hub.spawn(task1);
for (int i=0; (i < 6); ++i) {
hub.resume_pending_slaves();
}
if (!task1.dead()) {
std::cerr
<< "Expecting task #1 to complete!"
<< std::endl;
return (EXIT_FAILURE);
}
// Check that the task can complete by itself.
TestTask task2(5);
hub.spawn(task2);
for (int i=0; (i < 3); ++i) {
hub.resume_pending_slaves();
}
if (task2.dead()) {
std::cerr
<< "Not expecting task #2 to complete!"
<< std::endl;
return (EXIT_FAILURE);
}
std::cerr
<< "Shutting hub down."
<< std::endl;
hub.shutdown();
std::cerr
<< "Checking task #2's state."
<< std::endl;
if (!task2.dead()) {
std::cerr
<< "Expecting task #2 to be aborted!"
<< std::endl;
return (EXIT_FAILURE);
}
// Done.
return (EXIT_SUCCESS);
}
}
#include <w32/app/console-program.hpp>
namespace {
int run (int, wchar_t **)
{
const w32::net::Context _;
return (::test_hub_shutdown());
}
}
#include <w32/app/console-program.cpp><|fim▁end|> | |
<|file_name|>Master.cpp<|end_file_name|><|fim▁begin|>/*
* Ascent MMORPG Server
* Copyright (C) 2005-2008 Ascent Team <http://www.ascentemu.com/>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*/
#include "StdAfx.h"
#define BANNER "Summit r%u/%s-%s-%s :: World Server\n"
#ifndef WIN32
#include <sched.h>
#endif
#include "svn_revision.h"
#include <signal.h>
createFileSingleton( Master );
std::string LogFileName;
bool bLogChat;
bool crashed = false;
volatile bool Master::m_stopEvent = false;
// Database defines.
SERVER_DECL Database* Database_Character;
SERVER_DECL Database* Database_World;
// mainserv defines
SessionLogWriter* GMCommand_Log;
SessionLogWriter* Anticheat_Log;
SessionLogWriter* Player_Log;
extern DayWatcherThread * dw;
void Master::_OnSignal(int s)
{
switch (s)
{
#ifndef WIN32
case SIGHUP:
sWorld.Rehash(true);
break;
#endif
case SIGINT:
case SIGTERM:
case SIGABRT:
#ifdef _WIN32
case SIGBREAK:
#endif
Master::m_stopEvent = true;
break;
}
signal(s, _OnSignal);
}
Master::Master()
{
}
Master::~Master()
{
}
struct Addr
{
unsigned short sa_family;
/* sa_data */
unsigned short Port;
unsigned long IP; // inet_addr
unsigned long unusedA;
unsigned long unusedB;
};
#define DEF_VALUE_NOT_SET 0xDEADBEEF
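// Sentinel meaning "option not given on the command line" for the log-level variables below.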
#ifdef WIN32
static const char* default_config_file = "ascent-world.conf";
static const char* default_realm_config_file = "ascent-realms.conf";
#else
static const char* default_config_file = CONFDIR "/ascent-world.conf";
static const char* default_realm_config_file = CONFDIR "/ascent-realms.conf";
#endif
bool bServerShutdown = false;
bool StartConsoleListener();
void CloseConsoleListener();
ThreadContext * GetConsoleListener();
bool Master::Run(int argc, char ** argv)
{
char * config_file = (char*)default_config_file;
char * realm_config_file = (char*)default_realm_config_file;
int file_log_level = DEF_VALUE_NOT_SET;
int screen_log_level = DEF_VALUE_NOT_SET;
int do_check_conf = 0;
int do_version = 0;
int do_cheater_check = 0;
int do_database_clean = 0;
time_t curTime;
struct ascent_option longopts[] =
{
{ "checkconf", ascent_no_argument, &do_check_conf, 1 },
{ "version", ascent_no_argument, &do_version, 1 },
{ "conf", ascent_required_argument, NULL, 'c' },
{ "realmconf", ascent_required_argument, NULL, 'r' },
{ 0, 0, 0, 0 }
};
	int c;  // getopt returns int; a plain char breaks the != -1 comparison where char is unsigned
while ((c = ascent_getopt_long_only(argc, argv, ":f:", longopts, NULL)) != -1)
{
switch (c)
{
case 'c':
			config_file = new char[strlen(ascent_optarg) + 1];  // +1 for the terminating NUL
			strcpy(config_file, ascent_optarg);
break;
case 'r':
			realm_config_file = new char[strlen(ascent_optarg) + 1];  // +1 for the terminating NUL
			strcpy(realm_config_file, ascent_optarg);
break;
case 0:
break;
default:
printf("Usage: %s [--checkconf] [--conf <filename>] [--realmconf <filename>] [--version]\n", argv[0]);
return true;
}
}
// Startup banner
UNIXTIME = time(NULL);
g_localTime = *localtime(&UNIXTIME);
printf(BANNER, BUILD_REVISION, CONFIG, PLATFORM_TEXT, ARCH);
printf("Built at %s on %s by %s@%s\n", BUILD_TIME, BUILD_DATE, BUILD_USER, BUILD_HOST);
Log.Line();
if(do_version)
return true;
if( do_check_conf )
{
Log.Notice( "Config", "Checking config file: %s", config_file );
if( Config.MainConfig.SetSource(config_file, true ) )
Log.Success( "Config", "Passed without errors." );
else
Log.Warning( "Config", "Encountered one or more errors." );
Log.Notice( "Config", "Checking config file: %s\n", realm_config_file );
if( Config.RealmConfig.SetSource( realm_config_file, true ) )
Log.Success( "Config", "Passed without errors.\n" );
else
Log.Warning( "Config", "Encountered one or more errors.\n" );
/* test for die variables */
string die;
if( Config.MainConfig.GetString( "die", "msg", &die) || Config.MainConfig.GetString("die2", "msg", &die ) )
Log.Warning( "Config", "Die directive received: %s", die.c_str() );
return true;
}
printf( "The key combination <Ctrl-C> will safely shut down the server at any time.\n" );
Log.Line();
#ifndef WIN32
if(geteuid() == 0 || getegid() == 0)
Log.LargeErrorMessage( LARGERRORMESSAGE_WARNING, "You are running Ascent as root.", "This is not needed, and may be a possible security risk.", "It is advised to hit CTRL+C now and", "start as a non-privileged user.", NULL);
#endif
InitRandomNumberGenerators();
Log.Success( "Rnd", "Initialized Random Number Generators." );
ThreadPool.Startup();
uint32 LoadingTime = getMSTime();
Log.Notice( "Config", "Loading Config Files...\n" );
if( Config.MainConfig.SetSource( config_file ) )
Log.Success( "Config", ">> ascent-world.conf" );
else
{
Log.Error( "Config", ">> ascent-world.conf" );
return false;
}
string die;
if( Config.MainConfig.GetString( "die", "msg", &die) || Config.MainConfig.GetString( "die2", "msg", &die ) )
{
Log.Warning( "Config", "Die directive received: %s", die.c_str() );
return false;
}
if(Config.RealmConfig.SetSource(realm_config_file))
Log.Success( "Config", ">> ascent-realms.conf" );
else
{
Log.Error( "Config", ">> ascent-realms.conf" );
return false;
}
if( !_StartDB() )
{
return false;
}
Log.Line();
sLog.outString( "" );
//ScriptSystem = new ScriptEngine;
//ScriptSystem->Reload();
new EventMgr;
new World;
// open cheat log file
Anticheat_Log = new SessionLogWriter(FormatOutputString( "logs", "cheaters", false).c_str(), false );
GMCommand_Log = new SessionLogWriter(FormatOutputString( "logs", "gmcommand", false).c_str(), false );
Player_Log = new SessionLogWriter(FormatOutputString( "logs", "players", false).c_str(), false );
/* load the config file */
sWorld.Rehash(false);
// Initialize Opcode Table
WorldSession::InitPacketHandlerTable();
string host = Config.MainConfig.GetStringDefault( "Listen", "Host", DEFAULT_HOST );
int wsport = Config.MainConfig.GetIntDefault( "Listen", "WorldServerPort", DEFAULT_WORLDSERVER_PORT );
new ScriptMgr;
if( !sWorld.SetInitialWorldSettings() )
{
Log.Error( "Server", "SetInitialWorldSettings() failed. Something went wrong? Exiting." );
return false;
}
if( do_cheater_check )
sWorld.CleanupCheaters();
g_bufferPool.Init();
sWorld.SetStartTime((uint32)UNIXTIME);
WorldRunnable * wr = new WorldRunnable();
ThreadPool.ExecuteTask(wr);
_HookSignals();
ConsoleThread * console = new ConsoleThread();
ThreadPool.ExecuteTask(console);
uint32 realCurrTime, realPrevTime;
realCurrTime = realPrevTime = getMSTime();
// Socket loop!
uint32 start;
uint32 diff;
uint32 last_time = now();
uint32 etime;
// Start Network Subsystem
sLog.outString( "Starting network subsystem..." );
new SocketMgr;
new SocketGarbageCollector;
sSocketMgr.SpawnWorkerThreads();
LoadingTime = getMSTime() - LoadingTime;
sLog.outString ( "\nServer is ready for connections. Startup time: %ums\n", LoadingTime );
Log.Notice("RemoteConsole", "Starting...");
if( StartConsoleListener() )
{
#ifdef WIN32
ThreadPool.ExecuteTask( GetConsoleListener() );
#endif
Log.Notice("RemoteConsole", "Now open.");
}
else
{
Log.Warning("RemoteConsole", "Not enabled or failed listen.");
}
/* write pid file */
FILE * fPid = fopen( "ascent.pid", "w" );
if( fPid )
{
uint32 pid;
#ifdef WIN32
pid = GetCurrentProcessId();
#else
pid = getpid();
#endif
fprintf( fPid, "%u", (unsigned int)pid );
fclose( fPid );
}
#ifdef WIN32
HANDLE hThread = GetCurrentThread();
#endif
uint32 loopcounter = 0;
//ThreadPool.Gobble();
#ifndef CLUSTERING
/* Connect to realmlist servers / logon servers */
new LogonCommHandler();
sLogonCommHandler.Startup();
/* voicechat */
#ifdef VOICE_CHAT
new VoiceChatHandler();
sVoiceChatHandler.Startup();
#endif
// Create listener
ListenSocket<WorldSocket> * ls = new ListenSocket<WorldSocket>(host.c_str(), wsport);
bool listnersockcreate = ls->IsOpen();
#ifdef WIN32
if( listnersockcreate )
ThreadPool.ExecuteTask(ls);
#endif
while( !m_stopEvent && listnersockcreate )
#else
new ClusterInterface;
sClusterInterface.ConnectToRealmServer();
while(!m_stopEvent)
#endif
{
start = now();
diff = start - last_time;
		if(! ((++loopcounter) % 10000) )		// every 10000 loops (~8 min at the 50 ms tick below)
{
ThreadPool.ShowStats();
ThreadPool.IntegrityCheck();
g_bufferPool.Optimize();
}
/* since time() is an expensive system call, we only update it once per server loop */
curTime = time(NULL);
if( UNIXTIME != curTime )
{
UNIXTIME = time(NULL);
g_localTime = *localtime(&curTime);
}
#ifndef CLUSTERING
<|fim▁hole|> sVoiceChatHandler.Update();
#endif
#else
sClusterInterface.Update();
#endif
sSocketGarbageCollector.Update();
/* UPDATE */
last_time = now();
etime = last_time - start;
if( 50 > etime )
{
#ifdef WIN32
WaitForSingleObject( hThread, 50 - etime );
#else
Sleep( 50 - etime );
#endif
}
}
_UnhookSignals();
wr->Terminate();
ThreadPool.ShowStats();
/* Shut down console system */
console->terminate();
delete console;
// begin server shutdown
Log.Notice( "Shutdown", "Initiated at %s", ConvertTimeStampToDataTime( (uint32)UNIXTIME).c_str() );
if( lootmgr.is_loading )
{
Log.Notice( "Shutdown", "Waiting for loot to finish loading..." );
while( lootmgr.is_loading )
Sleep( 100 );
}
// send a query to wake it up if its inactive
Log.Notice( "Database", "Clearing all pending queries..." );
// kill the database thread first so we don't lose any queries/data
CharacterDatabase.EndThreads();
WorldDatabase.EndThreads();
Log.Notice( "DayWatcherThread", "Exiting..." );
dw->terminate();
dw = NULL;
#ifndef CLUSTERING
ls->Close();
#endif
CloseConsoleListener();
sWorld.SaveAllPlayers();
Log.Notice( "Network", "Shutting down network subsystem." );
#ifdef WIN32
sSocketMgr.ShutdownThreads();
#endif
sSocketMgr.CloseAll();
bServerShutdown = true;
ThreadPool.Shutdown();
sWorld.LogoutPlayers();
sLog.outString( "" );
delete LogonCommHandler::getSingletonPtr();
sWorld.ShutdownClasses();
Log.Notice( "World", "~World()" );
delete World::getSingletonPtr();
sScriptMgr.UnloadScripts();
delete ScriptMgr::getSingletonPtr();
Log.Notice( "EventMgr", "~EventMgr()" );
delete EventMgr::getSingletonPtr();
Log.Notice( "Database", "Closing Connections..." );
_StopDB();
Log.Notice( "Network", "Deleting Network Subsystem..." );
delete SocketMgr::getSingletonPtr();
delete SocketGarbageCollector::getSingletonPtr();
#ifdef VOICE_CHAT
Log.Notice( "VoiceChatHandler", "~VoiceChatHandler()" );
delete VoiceChatHandler::getSingletonPtr();
#endif
#ifdef ENABLE_LUA_SCRIPTING
sLog.outString("Deleting Script Engine...");
LuaEngineMgr::getSingleton().Unload();
#endif
//delete ScriptSystem;
delete GMCommand_Log;
delete Anticheat_Log;
delete Player_Log;
// remove pid
remove( "ascent.pid" );
g_bufferPool.Destroy();
Log.Notice( "Shutdown", "Shutdown complete." );
#ifdef WIN32
WSACleanup();
// Terminate Entire Application
//HANDLE pH = OpenProcess(PROCESS_TERMINATE, TRUE, GetCurrentProcessId());
//TerminateProcess(pH, 0);
//CloseHandle(pH);
#endif
return true;
}
bool Master::_StartDB()
{
string hostname, username, password, database;
int port = 0;
// Configure Main Database
bool result = Config.MainConfig.GetString( "WorldDatabase", "Username", &username );
Config.MainConfig.GetString( "WorldDatabase", "Password", &password );
result = !result ? result : Config.MainConfig.GetString( "WorldDatabase", "Hostname", &hostname );
result = !result ? result : Config.MainConfig.GetString( "WorldDatabase", "Name", &database );
result = !result ? result : Config.MainConfig.GetInt( "WorldDatabase", "Port", &port );
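	// 'result' accumulates success across the lookups above: once any of them fails it stays false.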
Database_World = Database::Create();
if(result == false)
{
DEBUG_LOG( "sql: One or more parameters were missing from WorldDatabase directive." );
return false;
}
// Initialize it
if( !WorldDatabase.Initialize(hostname.c_str(), (unsigned int)port, username.c_str(),
password.c_str(), database.c_str(), Config.MainConfig.GetIntDefault( "WorldDatabase", "ConnectionCount", 3 ), 16384 ) )
{
DEBUG_LOG( "sql: Main database initialization failed. Exiting." );
return false;
}
result = Config.MainConfig.GetString( "CharacterDatabase", "Username", &username );
Config.MainConfig.GetString( "CharacterDatabase", "Password", &password );
result = !result ? result : Config.MainConfig.GetString( "CharacterDatabase", "Hostname", &hostname );
result = !result ? result : Config.MainConfig.GetString( "CharacterDatabase", "Name", &database );
result = !result ? result : Config.MainConfig.GetInt( "CharacterDatabase", "Port", &port );
Database_Character = Database::Create();
if(result == false)
{
DEBUG_LOG( "sql: One or more parameters were missing from Database directive." );
return false;
}
// Initialize it
if( !CharacterDatabase.Initialize( hostname.c_str(), (unsigned int)port, username.c_str(),
password.c_str(), database.c_str(), Config.MainConfig.GetIntDefault( "CharacterDatabase", "ConnectionCount", 5 ), 16384 ) )
{
DEBUG_LOG( "sql: Main database initialization failed. Exiting." );
return false;
}
return true;
}
void Master::_StopDB()
{
delete Database_World;
delete Database_Character;
}
#ifndef WIN32
// Unix crash handler :oOoOoOoOoOo
volatile bool m_crashed = false;
void segfault_handler(int c)
{
if( m_crashed )
{
abort();
return; // not reached
}
m_crashed = true;
printf ("Segfault handler entered...\n");
try
{
if( World::getSingletonPtr() != 0 )
{
sLog.outString( "Waiting for all database queries to finish..." );
WorldDatabase.EndThreads();
CharacterDatabase.EndThreads();
sLog.outString( "All pending database operations cleared.\n" );
sWorld.SaveAllPlayers();
sLog.outString( "Data saved." );
}
}
catch(...)
{
sLog.outString( "Threw an exception while attempting to save all data." );
}
printf("Writing coredump...\n");
abort();
}
#endif
void Master::_HookSignals()
{
signal( SIGINT, _OnSignal );
signal( SIGTERM, _OnSignal );
signal( SIGABRT, _OnSignal );
#ifdef _WIN32
signal( SIGBREAK, _OnSignal );
#else
signal( SIGHUP, _OnSignal );
signal(SIGUSR1, _OnSignal);
// crash handler
signal(SIGSEGV, segfault_handler);
signal(SIGFPE, segfault_handler);
signal(SIGILL, segfault_handler);
signal(SIGBUS, segfault_handler);
#endif
}
void Master::_UnhookSignals()
{
signal( SIGINT, 0 );
signal( SIGTERM, 0 );
signal( SIGABRT, 0 );
#ifdef _WIN32
signal( SIGBREAK, 0 );
#else
signal( SIGHUP, 0 );
#endif
}
#ifdef WIN32
Mutex m_crashedMutex;
// Crash Handler
void OnCrash( bool Terminate )
{
sLog.outString( "Advanced crash handler initialized." );
if( !m_crashedMutex.AttemptAcquire() )
TerminateThread( GetCurrentThread(), 0 );
try
{
if( World::getSingletonPtr() != 0 )
{
sLog.outString( "Waiting for all database queries to finish..." );
WorldDatabase.EndThreads();
CharacterDatabase.EndThreads();
sLog.outString( "All pending database operations cleared.\n" );
sWorld.SaveAllPlayers();
sLog.outString( "Data saved." );
}
}
catch(...)
{
sLog.outString( "Threw an exception while attempting to save all data." );
}
sLog.outString( "Closing." );
// beep
//printf("\x7");
// Terminate Entire Application
if( Terminate )
{
HANDLE pH = OpenProcess( PROCESS_TERMINATE, TRUE, GetCurrentProcessId() );
TerminateProcess( pH, 1 );
CloseHandle( pH );
}
}
#endif<|fim▁end|> | #ifdef VOICE_CHAT
|
<|file_name|>SuperBot.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import json
import os
import time
import psutil
import pyautogui
pubg_url = 'steam://rungameid/578080'
PROCNAME = "TslGame.exe"
CRASH_PROCNAME = "BroCrashReporter.exe"
debug_directory = "debug_screenshots"
start_state = "HELLO"
play_state = "PLAYING"
play_timer_max = 60 * 3
matching_state = "MATCHING"
matching_timer_max = 60 * 3
loading_state = "LOADING"
loading_timer_max = 60 * 3
gameloading_state = "GAME IS LOADING"
gameloading_timer_max = 60 * 3
state = start_state
takeScrenshot = True
timer = 0.0
def getConfig():
with open('config.json', encoding='UTF-8') as data_file:
data = json.load(data_file)
return data
def getpixel(x, y):
return pyautogui.screenshot().getpixel((x, y))
def pixelMatchesColor(x, y, expectedRGBColor, tolerance=0):
pix = getpixel(x,y)
if len(pix) == 3 or len(expectedRGBColor) == 3: # RGB mode
r, g, b = pix[:3]
exR, exG, exB = expectedRGBColor[:3]
return (abs(r - exR) <= tolerance) and (abs(g - exG) <= tolerance) and (abs(b - exB) <= tolerance)
elif len(pix) == 4 and len(expectedRGBColor) == 4: # RGBA mode
r, g, b, a = pix
exR, exG, exB, exA = expectedRGBColor
return (abs(r - exR) <= tolerance) and (abs(g - exG) <= tolerance) and (abs(b - exB) <= tolerance) and (
abs(a - exA) <= tolerance)
else:
assert False, 'Color mode was expected to be length 3 (RGB) or 4 (RGBA), but pixel is length %s and expectedRGBColor is length %s' % (
len(pix), len(expectedRGBColor))
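# Usage sketch (illustrative coordinates/color): pixelMatchesColor(10, 20, (255, 255, 255), tolerance=10)
# is True when the screen pixel at (10, 20) is within 10 of pure white on every channel.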
def printScreen(message):
if takeScrenshot:
if not os.path.exists(debug_directory):
os.makedirs(debug_directory)
pyautogui.screenshot('{}/{}{}.png'.format(debug_directory, time.strftime("%m.%d %H.%M.%S", time.gmtime()), message))
def changeState(value):
global state, timer
state = value
timer = 0
def killGame():
for proc in psutil.process_iter():
# check whether the process name matches
if proc.name() == PROCNAME:
proc.kill()
def matchesButton(position):
if pixelMatchesColor(position[0], position[1], white_button,
tolerance=color_tolerance) or pixelMatchesColor(position[0],
position[1],
gray_button,
tolerance=color_tolerance) \
or pixelMatchesColor(position[0],
position[1],
super_white_button,
tolerance=color_tolerance) or pixelMatchesColor(
position[0], position[1], golden_button, tolerance=color_tolerance):
return True
return False
def isGameRunning():
for proc in psutil.process_iter():
# check whether the process name matches
if proc.name() == PROCNAME:
return True
else:
return False
def checkTimer():
global state
if state == loading_state and timer > loading_timer_max:
printScreen('Timeout')
print('Timeout. Restarting the game')
changeState(start_state)
elif state == matching_state and timer > matching_timer_max:
printScreen('Timeout')
print('Timeout. Restarting the game')
changeState(start_state)
elif state == play_state and timer > play_timer_max:
printScreen('Timeout')
print('Timeout. Restarting the game')
changeState(start_state)
elif state == gameloading_state and timer > gameloading_timer_max:
printScreen('Timeout')
print('Timeout. Restarting the game')
changeState(start_state)
config = getConfig()
# Menu
print('By using this software you agree to the license! You can find it in the code.')
print('Choose a server:')
number = 1
for server in config['servers']:
print('{}. {}'.format(number, server['title']))
number += 1
inp = int(input('Type number: '))
inp -= 1
server_position = (config['servers'][inp]['x'], config['servers'][inp]['y'], config['servers'][inp]['title'])
print('Choose a mod:')
number = 1
for server in config['modes']:
print('{}. {}'.format(number, server['title']))
number += 1
inp = int(input('Type number: '))
inp -= 1
print('Can I take screenshots if something wrong happens? (y/N)')
if input().lower() == 'y':
print('Thanks')
else:
print("Well, if something will go wrong, then I can't help you")
takeScrenshot = False
# Position init
mode_position = (config['modes'][inp]['x'], config['modes'][inp]['y'], config['modes'][inp]['title'])
mode_tick_position = (config['modes'][inp]['tick']['x'], config['modes'][inp]['tick']['y'])
play_button_position = (config['play_button']['x'], config['play_button']['y'])
play_state_position = (config['play_state']['x'], config['play_state']['y'])
text_position = (config['text']['x'], config['text']['y'])
exit_position = (config['exit_to_lobby']['x'], config['exit_to_lobby']['y'])
error_position_check = (config['error_position']['x'], config['error_position']['y'])
error_ok_position = (config['error_ok_position']['x'], config['error_ok_position']['y'])
game_message_position = (config['game_message_position']['x'], config['game_message_position']['y'])
exit_button_position = (config['exit_button_position']['x'], config['exit_button_position']['y'])
reconnect_button_position = (config['reconnect_button_position']['x'], config['reconnect_button_position']['y'])
# Reading timings
refresh_rate = config["timers"]["refresh_rate"]
wait_after_killing_a_game = config["timers"]["wait_after_killing_a_game"]
start_delay = config["timers"]["start_delay"]
animation_delay = config["timers"]["animation_delay"]
wait_for_players = config["timers"]["wait_for_players"]
wait_for_plain = config["timers"]["wait_for_plain"]
exit_animation_delay = config["timers"]["exit_animation_delay"]
loading_delay = config["timers"]["loading_delay"]
# Colors
def getColor(config, name):
return (config["colors"][name]["r"], config["colors"][name]["g"], config["colors"][name]["b"])
color_tolerance = config["color_tolerance"]
dark_play_color = getColor(config, "dark_play_color")
play_color = getColor(config, "play_color")
matching_color = getColor(config, "matching_color")
matching_tick_color = getColor(config, "matching_tick_color")
text_start_color = getColor(config, "text_start_color")
white_button = getColor(config, "white_button")
gray_button = getColor(config, "gray_button")
golden_button = getColor(config, "golden_button")
super_white_button = getColor(config, "super_white_button")
windows_background = getColor(config, "windows_background")
exit_button_color = getColor(config, "exit_button_color")
reconnect_button_color = getColor(config, "reconnect_button_color")
# Game info
print('Server: {}. Mode: {}'.format(server_position[2], mode_position[2]))
while (1):
try:
for proc in psutil.process_iter():
# check whether the process name matches
if proc.name() == CRASH_PROCNAME:
print('Fucking bugs in PUBG. Trying to avoid them!')
proc.kill()
killGame()
time.sleep(wait_after_killing_a_game)
changeState(start_state)
except Exception as ex:
print('Something went wrong while killing bug reporter... Error message: {}'.format(ex))
if state == start_state:
if pixelMatchesColor(error_position_check[0], error_position_check[1], windows_background,
tolerance=color_tolerance):
pyautogui.press('enter')
pyautogui.click(error_ok_position[0], error_ok_position[1])
killGame()
time.sleep(wait_after_killing_a_game)<|fim▁hole|> try:
os.startfile(pubg_url)
changeState(loading_state)
time.sleep(start_delay)
print('Loading PUBG')
except Exception as ex:
print('Something went wrong while starating PUBG... Error message: {}'.format(ex))
elif state == loading_state:
if pixelMatchesColor(play_state_position[0], play_state_position[1], play_color,
tolerance=color_tolerance) or pixelMatchesColor(play_state_position[0],
play_state_position[1],
dark_play_color,
tolerance=color_tolerance):
pyautogui.moveTo(play_button_position[0], play_button_position[1])
time.sleep(animation_delay)
# Pick a server
pyautogui.click(server_position[0], server_position[1])
time.sleep(animation_delay)
pyautogui.click(mode_position[0], mode_position[1])
time.sleep(animation_delay)
if pixelMatchesColor(mode_tick_position[0], mode_tick_position[1], matching_tick_color,
tolerance=color_tolerance):
pyautogui.click(mode_tick_position[0], mode_tick_position[1])
pyautogui.click(play_button_position[0], play_button_position[1])
changeState(matching_state)
time.sleep(loading_delay)
print('Starting matchmaking...')
elif pixelMatchesColor(text_position[0], text_position[1], text_start_color, tolerance=color_tolerance):
print('I see text, so the game is probably ready...')
changeState(play_state)
elif pixelMatchesColor(reconnect_button_position[0], reconnect_button_position[1], reconnect_button_color, tolerance=color_tolerance):
print('Nice orange button? I\'ll press it!')
pyautogui.click(reconnect_button_position[0], reconnect_button_position[1])
time.sleep(animation_delay)
elif matchesButton(game_message_position):
print("Game's message was denied")
pyautogui.click(game_message_position[0], game_message_position[1])
elif not pixelMatchesColor(exit_button_position[0], exit_button_position[1], exit_button_color, tolerance=color_tolerance) \
and not pixelMatchesColor(exit_button_position[0], exit_button_position[1], matching_tick_color, tolerance=color_tolerance)\
and timer > 30 and isGameRunning():
print('I can\'t see exit button, so the game is probably ready...')
time.sleep(wait_for_players)
changeState(play_state)
elif state == matching_state:
if pixelMatchesColor(play_state_position[0], play_state_position[1], play_color,
tolerance=color_tolerance) or pixelMatchesColor(play_state_position[0],
play_state_position[1],
dark_play_color,
tolerance=color_tolerance):
changeState(loading_state)
time.sleep(loading_delay)
if not pixelMatchesColor(play_state_position[0], play_state_position[1], matching_color,
tolerance=color_tolerance):
if pixelMatchesColor(play_state_position[0], play_state_position[1], matching_tick_color,
tolerance=color_tolerance):
changeState(gameloading_state)
time.sleep(loading_delay)
print('Session is loading')
elif state == gameloading_state:
if not pixelMatchesColor(play_state_position[0], play_state_position[1], matching_tick_color,
tolerance=color_tolerance):
print('Loading is complete')
time.sleep(wait_for_players)
changeState(play_state)
elif state == play_state:
# print(text_position[0], text_position[1])
if not pixelMatchesColor(text_position[0], text_position[1], text_start_color, tolerance=color_tolerance):
time.sleep(wait_for_plain)
pyautogui.press('esc')
time.sleep(animation_delay)
pyautogui.click(exit_position[0], exit_position[1])
time.sleep(exit_animation_delay)
pyautogui.click(exit_position[0], exit_position[1])
changeState(loading_state)
print('Going in menu. Loading again')
time.sleep(10)
time.sleep(refresh_rate)
timer += refresh_rate
checkTimer()<|fim▁end|> | |
<|file_name|>common_utils.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Attention types.
ATT_LUONG = "luong"
ATT_LUONG_SCALED = "luong_scaled"
ATT_BAHDANAU = "bahdanau"
ATT_BAHDANAU_NORM = "bahdanau_norm"
ATT_TYPES = (ATT_LUONG, ATT_LUONG_SCALED, ATT_BAHDANAU, ATT_BAHDANAU_NORM)
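# Minimal validation sketch (illustrative; 'att_type' stands for a caller-supplied value):
#   assert att_type in ATT_TYPES, "Unknown attention type: %s" % att_type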
# Encoder types.
ENC_UNI = "uni"
ENC_BI = "bi"
ENC_GNMT = "gnmt"
ENC_TYPES = (ENC_UNI, ENC_BI, ENC_GNMT)
# Decoder types.
DEC_BASIC = "basic"
DEC_ATTENTIVE = "attentive"<|fim▁hole|>DEC_TYPES = (DEC_BASIC, DEC_ATTENTIVE)
# Language model types.
LM_L2R = "left2right"
LM_TYPES = (LM_L2R,)<|fim▁end|> | |
<|file_name|>protractor.conf.js<|end_file_name|><|fim▁begin|><|fim▁hole|>/*eslint-env node */
exports.config = {
specs: [
'test/e2e/**/*.js'
],
baseUrl: 'http://localhost:9000',
chromeOnly: true
}<|fim▁end|> | 'use strict' |
<|file_name|>osxpasscrack.py<|end_file_name|><|fim▁begin|>import itertools
import subprocess
import sys
#http://pastebin.com/zj72xk4N<|fim▁hole|>
#run when system password box is showing eg. keychain password dialog
#apple script for automating dialog box input
sys_script = '''
tell application "System Events" to tell process "SecurityAgent"
set value of text field 1 of window 1 to $(PASS)
click button 1 of group 1 of window 1
end tell
'''
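# $(PASS) above is a placeholder token, replaced below with each quoted guess before calling osascript.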
#fill this array with chars for combination
keys = ['s','t','a','r','t']
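# Tries all permutations of length 0..5 of 'keys': up to 326 attempts here (the duplicate 't' repeats some guesses).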
def automate_login():
    for l in range(0, len(keys) + 1):  # range (not xrange) keeps this runnable on Python 3 as well
for subset in itertools.permutations(keys, l):
guess = ''.join(subset)
tmp = sys_script.replace('$(PASS)', '"%s"' % guess)
try:
subprocess.check_output('osascript -e \'%s\'' % tmp, shell=True)
sys.stdout.write('\rtrying %s ' % guess)
sys.stdout.flush()
except subprocess.CalledProcessError:
print('\nfailed')
return
return
automate_login()<|fim▁end|> | |
<|file_name|>console.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
// Fires up a console with Valid loaded.<|fim▁hole|>Valid = require('./lib/valid');
require('repl').start();<|fim▁end|> | // Allows you to quickly play with Valid on the console.
|
<|file_name|>test_ciftify_recon_all.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
import unittest
import logging
import importlib
import copy
import os
from docopt import docopt
from unittest.mock import patch
import pytest
import ciftify.utils
logging.disable(logging.CRITICAL)
ciftify_recon_all = importlib.import_module('ciftify.bin.ciftify_recon_all')
class ConvertFreesurferSurface(unittest.TestCase):
meshes = ciftify_recon_all.define_meshes('/somewhere/hcp/subject_1',
"164", ["32"], '/tmp/temp_dir', False)
@patch('ciftify.bin.ciftify_recon_all.run')
def test_secondary_type_option_adds_to_set_structure_command(self, mock_run):
secondary_type = 'GRAY_WHITE'
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
surface_secondary_type=secondary_type)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
set_structure_present = False
for item in arg_list:
args = item[0][0]
if '-set-structure' in args:
set_structure_present = True
assert '-surface-secondary-type' in args
assert secondary_type in args
# If this fails the wb_command -set-structure call is not being made
# at all. Is expected at least once regardless of secondary-type option
assert set_structure_present
@patch('ciftify.bin.ciftify_recon_all.run')
def test_secondary_type_not_set_if_option_not_used(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'])
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
set_structure_present = False
for item in arg_list:
args = item[0][0]
if '-set-structure' in args:
set_structure_present = True
assert '-surface-secondary-type' not in args
# If this fails the wb_command -set-structure call is not being made
# at all. Is expected at least once regardless of secondary-type option
assert set_structure_present
@patch('ciftify.bin.ciftify_recon_all.run')
def test_wbcommand_surface_apply_affine_called_when_cras_option_set(self,
mock_run):
cras_file = '/somewhere/cras.mat'
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
cras_mat=cras_file)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
surface_apply_calls = 0
for item in arg_list:
args = item[0][0]
if '-surface-apply-affine' in args and cras_file in args:
surface_apply_calls += 1
# The wb_command -surface-apply-affine command should be run once for
# each hemisphere
assert surface_apply_calls == 2
@patch('ciftify.bin.ciftify_recon_all.run')
def test_no_wbcommand_added_when_cras_option_not_set(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'])
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
surface_apply_calls = 0
for item in arg_list:
args = item[0][0]
if '-surface-apply-affine' in args:
surface_apply_calls += 1
assert surface_apply_calls == 0
@patch('ciftify.bin.ciftify_recon_all.run')
def test_add_to_spec_option_adds_wbcommand_call(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
add_to_spec=True)
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
spec_added_calls = 0
for item in arg_list:
args = item[0][0]
if '-add-to-spec-file' in args:
spec_added_calls += 1
# Should add one call for each hemisphere
assert spec_added_calls == 2
@patch('ciftify.bin.ciftify_recon_all.run')
def test_add_to_spec_option_not_present_when_option_not_set(self, mock_run):
ciftify_recon_all.convert_freesurfer_surface('subject_1', 'white', 'ANATOMICAL',
'/somewhere/freesurfer/subject_1', self.meshes['T1wNative'],
add_to_spec=False)<|fim▁hole|>
assert mock_run.call_count >= 1
arg_list = mock_run.call_args_list
spec_added_calls = 0
for item in arg_list:
args = item[0][0]
if '-add-to-spec-file' in args:
spec_added_calls += 1
assert spec_added_calls == 0
class CreateRegSphere(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.run_MSMSulc_registration')
@patch('ciftify.bin.ciftify_recon_all.run_fs_reg_LR')
def test_reg_sphere_is_not_set_to_none_for_any_mode(self, mock_fs_reg,
mock_msm_reg):
"""
Should fail if MSMSulc registration is implemented without supplying a
value for reg_sphere
"""
# settings stub, to allow tests to be written.
class Settings(object):
def __init__(self, name):
self.high_res = 999
self.reg_name = name
self.ciftify_data_dir = '/somedir/'
self.msm_config = None
# Test reg_sphere set when in FS mode
settings = Settings('FS')
meshes = {'AtlasSpaceNative' : ''}
subject_id = 'some_id'
reg_sphere = ciftify_recon_all.create_reg_sphere(settings, subject_id, meshes)
assert reg_sphere is not None
# Test reg_sphere set when in MSMSulc mode
settings = Settings('MSMSulc')
reg_sphere = ciftify_recon_all.create_reg_sphere(settings, subject_id, meshes)
assert reg_sphere is not None
class CopyAtlasRoiFromTemplate(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.link_to_template_file')
def test_does_nothing_when_roi_src_does_not_exist(self, mock_link):
class Settings(object):
def __init__(self):
self.subject = self.Subject()
self.ciftify_data_dir = '/someotherpath/ciftify/data'
self.work_dir = '/somepath/hcp'
class Subject(object):
def __init__(self):
id = 'some_id'
settings = Settings()
mesh_settings = {'meshname' : 'some_mesh'}
ciftify_recon_all.copy_atlas_roi_from_template(settings, mesh_settings)
assert mock_link.call_count == 0
class DilateAndMaskMetric(unittest.TestCase):
@patch('ciftify.bin.ciftify_recon_all.run')
def test_does_nothing_when_dscalars_map_doesnt_mask_medial_wall(self,
mock_run):
# Stubs to allow testing
dscalars = {'some_map' : {'mask_medialwall' : False}}
mesh = {'tmpdir' : '/tmp/temp_dir',
'meshname' : 'some_mesh'}
ciftify_recon_all.dilate_and_mask_metric('some_id', mesh, dscalars)
assert mock_run.call_count == 0
@patch('os.makedirs')
@patch('ciftify.config.find_fsl')
class TestSettings(unittest.TestCase):
arguments = docopt(ciftify_recon_all.__doc__,
'--hcp-data-dir /somepath/pipelines/hcp --fs-subjects-dir /somepath/pipelines/freesurfer --surf-reg FS STUDY_SITE_ID_01')
subworkdir = '/somepath/pipelines/hcp/STUDY_SITE_ID_01'
yaml_config = {'high_res' : "164",
'low_res' : ["32"],
'grayord_res' : [2],
'dscalars' : {},
'registration' : {'src_dir' : 'T1w',
'dest_dir' : 'MNINonLinear',
'xfms_dir' : 'MNINonLinear/xfms'},
'FSL_fnirt' : {'2mm' : {'FNIRTConfig' : 'etc/flirtsch/T1_2_MNI152_2mm.cnf'}}}
def set_mock_env(self, mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_fs_root_dir_set_to_user_value_when_given(self, mock_exists,
mock_settings, mock_ciftify, mock_fsl, mock_makedirs):
self.set_mock_env(mock_ciftify, mock_fsl, mock_makedirs)
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_settings.return_value = self.yaml_config
settings = ciftify_recon_all.Settings(self.arguments)
assert settings.fs_root_dir == self.arguments['--fs-subjects-dir']
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.config.find_freesurfer_data')
@patch('os.path.exists')
def test_exits_when_no_fs_dir_given_and_cannot_find_shell_value(self,
mock_exists, mock_fs, mock_ciftify, mock_fsl, mock_makedirs):
self.set_mock_env(mock_ciftify, mock_fsl, mock_makedirs)
# This is to avoid sys.exit calls due to the mock directories not
# existing.
mock_exists.return_value = True
# work with a deep copy of arguments to avoid modifications having any
# effect on later tests
args_copy = copy.deepcopy(self.arguments)
args_copy['--fs-subjects-dir'] = None
# Just in case the shell environment has the variable set...
mock_fs.return_value = None
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args_copy)
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_dscalars_doesnt_contain_msmsulc_settings_when_reg_name_is_FS(
self, mock_exists, mock_yaml_settings, mock_ciftify, mock_fsl,
mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
settings = ciftify_recon_all.Settings(self.arguments)
if settings.reg_name == 'FS':
assert 'ArealDistortion_MSMSulc' not in settings.dscalars.keys()
else:
assert True
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_msm_config_set_to_none_in_fs_mode(self, mock_exists,
mock_yaml_settings, mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
args_copy = copy.deepcopy(self.arguments)
args_copy['--surf-reg'] = "FS"
settings = ciftify_recon_all.Settings(self.arguments)
assert settings.msm_config is None
@patch('ciftify.config.find_ciftify_global')
@patch('ciftify.config.verify_msm_available')
@patch('ciftify.bin.ciftify_recon_all.Settings.check_msm_config', return_value = True)
@patch('ciftify.bin.ciftify_recon_all.WorkFlowSettings._WorkFlowSettings__read_settings')
@patch('os.path.exists')
def test_msm_config_set_to_default_when_user_config_not_given(self,
mock_exists, mock_yaml_settings, mock_msm_check, mock_msm_check1,
mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
# This is to avoid sys.exit calls due to mock directories not
# existing.
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
mock_yaml_settings.return_value = self.yaml_config
# Modify copy of arguments, so changes dont effect other tests
args = copy.deepcopy(self.arguments)
args['--surf-reg'] = 'MSMSulc'
args['--MSM-config'] = None
settings = ciftify_recon_all.Settings(args)
assert settings.msm_config is not None
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_user_msm_config_doesnt_exist(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
# This is to avoid test failure if shell environment changes
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
user_config = "/some/path/nonexistent_config"
mock_exists.side_effect = lambda path: False if path == user_config else True
args = copy.deepcopy(self.arguments)
args['--surf-reg'] = 'MSMSulc'
args['--MSM-config'] = user_config
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_nonlin_xfm_given_alone(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-non-lin-xfm'] = '/some/file'
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.config.find_ciftify_global')
@patch('os.path.exists')
def test_sys_exit_raised_when_lin_xfm_given_alone(self, mock_exists,
mock_ciftify, mock_fsl, mock_makedirs):
mock_ciftify.return_value = '/somepath/ciftify/data'
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-lin-premat'] = '/some/file'
with pytest.raises(SystemExit):
settings = ciftify_recon_all.Settings(args)
@patch('ciftify.utils.check_input_readable')
@patch('os.path.exists')
def test_xfms_set_if_given(self, mock_exists, mock_inputreadble,
mock_fsl, mock_makedirs):
mock_fsl.return_value = '/somepath/FSL'
mock_exists.side_effect = lambda path: False if path == self.subworkdir else True
args = copy.deepcopy(self.arguments)
args['--read-lin-premat'] = '/some/file1'
args['--read-non-lin-xfm'] = '/some/file2'
settings = ciftify_recon_all.Settings(args)
# Test should never reach this line
assert settings.registration['User_AtlasTransform_Linear'] == '/some/file1'
assert settings.registration['User_AtlasTransform_NonLinear'] == '/some/file2'<|fim▁end|> | |
<|file_name|>test_realm_domains.py<|end_file_name|><|fim▁begin|>import orjson
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from zerver.lib.actions import (
do_change_realm_domain,
do_change_user_role,
do_create_realm,
do_remove_realm_domain,
do_set_realm_property,
)
from zerver.lib.domains import validate_domain
from zerver.lib.email_validation import email_allowed_for_realm
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import DomainNotAllowedForRealmError, RealmDomain, UserProfile, get_realm
class RealmDomainTest(ZulipTestCase):
def setUp(self) -> None:
realm = get_realm("zulip")
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
def test_list_realm_domains(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com", allow_subdomains=True)
result = self.client_get("/json/realm/domains")
self.assert_json_success(result)
received = result.json()["domains"]
expected = [
{"domain": "zulip.com", "allow_subdomains": False},
{"domain": "acme.com", "allow_subdomains": True},
]
self.assertEqual(received, expected)
def test_not_realm_admin(self) -> None:
self.login("hamlet")
result = self.client_post("/json/realm/domains")
self.assert_json_error(result, "Must be an organization administrator")
result = self.client_patch("/json/realm/domains/15")
self.assert_json_error(result, "Must be an organization administrator")
result = self.client_delete("/json/realm/domains/15")
self.assert_json_error(result, "Must be an organization administrator")
def test_create_realm_domain(self) -> None:
self.login("iago")
data = {
"domain": "",
"allow_subdomains": orjson.dumps(True).decode(),
}
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_error(result, "Invalid domain: Domain can't be empty.")
data["domain"] = "acme.com"
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_success(result)
realm = get_realm("zulip")
self.assertTrue(
RealmDomain.objects.filter(
realm=realm, domain="acme.com", allow_subdomains=True
).exists()
)
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_error(
result, "The domain acme.com is already a part of your organization."
)
mit_user_profile = self.mit_user("sipbtest")
self.login_user(mit_user_profile)
<|fim▁hole|> mit_user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
)
result = self.client_post(
"/json/realm/domains", info=data, HTTP_HOST=mit_user_profile.realm.host
)
self.assert_json_success(result)
def test_patch_realm_domain(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com", allow_subdomains=False)
data = {
"allow_subdomains": orjson.dumps(True).decode(),
}
url = "/json/realm/domains/acme.com"
result = self.client_patch(url, data)
self.assert_json_success(result)
self.assertTrue(
RealmDomain.objects.filter(
realm=realm, domain="acme.com", allow_subdomains=True
).exists()
)
url = "/json/realm/domains/non-existent.com"
result = self.client_patch(url, data)
self.assertEqual(result.status_code, 400)
self.assert_json_error(result, "No entry found for domain non-existent.com.")
def test_delete_realm_domain(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com")
result = self.client_delete("/json/realm/domains/non-existent.com")
self.assertEqual(result.status_code, 400)
self.assert_json_error(result, "No entry found for domain non-existent.com.")
result = self.client_delete("/json/realm/domains/acme.com")
self.assert_json_success(result)
self.assertFalse(RealmDomain.objects.filter(domain="acme.com").exists())
self.assertTrue(realm.emails_restricted_to_domains)
def test_delete_all_realm_domains(self) -> None:
self.login("iago")
realm = get_realm("zulip")
query = RealmDomain.objects.filter(realm=realm)
self.assertTrue(realm.emails_restricted_to_domains)
for realm_domain in query.all():
do_remove_realm_domain(realm_domain, acting_user=None)
self.assertEqual(query.count(), 0)
# Deleting last realm_domain should set `emails_restricted_to_domains` to False.
# This should be tested on a fresh instance, since the cached objects
# would not be updated.
self.assertFalse(get_realm("zulip").emails_restricted_to_domains)
def test_email_allowed_for_realm(self) -> None:
realm1 = do_create_realm("testrealm1", "Test Realm 1", emails_restricted_to_domains=True)
realm2 = do_create_realm("testrealm2", "Test Realm 2", emails_restricted_to_domains=True)
realm_domain = RealmDomain.objects.create(
realm=realm1, domain="test1.com", allow_subdomains=False
)
RealmDomain.objects.create(realm=realm2, domain="test2.test1.com", allow_subdomains=True)
email_allowed_for_realm("[email protected]", realm1)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm1)
email_allowed_for_realm("[email protected]", realm2)
email_allowed_for_realm("[email protected]", realm2)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm2)
do_change_realm_domain(realm_domain, True)
email_allowed_for_realm("[email protected]", realm1)
email_allowed_for_realm("[email protected]", realm1)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm1)
def test_realm_realm_domains_uniqueness(self) -> None:
realm = get_realm("zulip")
with self.assertRaises(IntegrityError):
RealmDomain.objects.create(realm=realm, domain="zulip.com", allow_subdomains=True)
def test_validate_domain(self) -> None:
invalid_domains = [
"",
"test",
"t.",
"test.",
".com",
"-test",
"test...com",
"test-",
"test_domain.com",
"test.-domain.com",
"a" * 255 + ".com",
]
for domain in invalid_domains:
with self.assertRaises(ValidationError):
validate_domain(domain)
valid_domains = ["acme.com", "x-x.y.3.z"]
for domain in valid_domains:
validate_domain(domain)<|fim▁end|> | do_change_user_role( |
<|file_name|>Server.java<|end_file_name|><|fim▁begin|>package proxy.test;
import java.rmi.Naming;
import java.rmi.RemoteException;
import java.rmi.server.UnicastRemoteObject;
import util.JavaLog;
public class Server extends UnicastRemoteObject implements RemoteInterface{
private static final long serialVersionUID = 1L;
protected Server() throws RemoteException {
super();
}
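// Hypothetical client-side sketch (not part of this server): with the default
// RMI registry running, a client could call the service like this:
//   RemoteInterface stub = (RemoteInterface) Naming.lookup("Hello");
//   String greeting = stub.Hi();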
public static void main(String[] args) {
try {
RemoteInterface service = new Server();
Naming.rebind("Hello", service);
JavaLog.d("sadfasfd");
} catch (Exception e) {
JavaLog.d("sadfasfd:" + e);
}
}
@Override
public String Hi() {
return "Hello,Deyu";
}<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>home-view-model.ts<|end_file_name|><|fim▁begin|>import { Observable } from "tns-core-modules/data/observable";
export class HomeViewModel extends Observable {
constructor() {
super();
}
<|fim▁hole|><|fim▁end|> | } |
<|file_name|>byteResponseDTO.ts<|end_file_name|><|fim▁begin|>/**
* Soccer API<|fim▁hole|> *
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
export interface ByteResponseDTO {
bytes?: string;
}<|fim▁end|> | * No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: 1.0
* Contact: [email protected] |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright (c) Peter Parente
# Distributed under the terms of the BSD 2-Clause License.
import os
from flask import Flask
from flask_sslify import SSLify
from .model import db
from .auth import oauth
from .ui import ui_bp<|fim▁hole|>
app = Flask(__name__)
app.secret_key = os.getenv('SECRET_KEY', os.urandom(24))
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('SQLALCHEMY_DATABASE_URI',
'sqlite:////tmp/bof.db')
app.config['APP_TITLE'] = os.getenv('APP_TITLE', 'Birds of a Feather')
app.config['GITHUB_CONSUMER_KEY'] = os.getenv('GITHUB_CONSUMER_KEY')
app.config['GITHUB_CONSUMER_SECRET'] = os.getenv('GITHUB_CONSUMER_SECRET')
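# Hypothetical environment for a local run (values illustrative):
#   export SECRET_KEY=change-me
#   export SQLALCHEMY_DATABASE_URI=sqlite:////tmp/bof.db
#   export GITHUB_CONSUMER_KEY=<oauth app key>
#   export GITHUB_CONSUMER_SECRET=<oauth app secret>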
app.register_blueprint(api_bp)
app.register_blueprint(ui_bp)
db.init_app(app)
oauth.init_app(app)
if 'VCAP_SERVICES' in os.environ:
SSLify(app)<|fim▁end|> | from .api import api_bp |
<|file_name|>create-multimethod.ts<|end_file_name|><|fim▁begin|>import {AsyncMultimethod, Decorators, Methods, Multimethod} from '../interface/multimethod';
import {Options} from '../interface/options';
import {analyse} from './analysis';
import {codegen} from './codegen';
// TODO: ...
export function createMultimethod<P extends unknown[]>(options?: Options<P>): Multimethod<P>;
export function createMultimethod<P extends unknown[]>(options?: Options<P, Promise<string>>): AsyncMultimethod<P>;
export function createMultimethod(options: Options<unknown[], any>) {
let mm = codegen(analyse(options, {}, {}));
let result = addMethods(mm, {}, {});
return result;
function addMethods<T>(mm: T, existingMethods: Methods, existingDecorators: Decorators) {
let extend = (methods: Methods<unknown[], unknown, 'super'>) => {
let combinedMethods = combine(existingMethods, methods);
let mm2 = codegen(analyse(options, combinedMethods, existingDecorators));
return addMethods(mm2, combinedMethods, existingDecorators);
};
let decorate = (decorators: Decorators<unknown[], unknown, 'super'>) => {
let combinedDecorators = combine(existingDecorators, decorators);
let mm2 = codegen(analyse(options, existingMethods, combinedDecorators));
return addMethods(mm2, existingMethods, combinedDecorators);
};
return Object.assign(mm, {extend, decorate});
}
}
// TODO: combine two method tables
function combine(existing: Methods, additional: Methods<unknown[], unknown, 'super'>): Methods;
function combine(existing: Decorators, additional: Decorators<unknown[], unknown, 'super'>): Decorators;
function combine(existing: Record<string, Function | Function[]>, additional: Record<string, Function | Array<Function | 'super'>>) {
let existingKeys = Object.keys(existing);
let additionalKeys = Object.keys(additional);
let keysInBoth = existingKeys.filter(k => additionalKeys.indexOf(k) !== -1);
let existingOnlyKeys = existingKeys.filter(k => keysInBoth.indexOf(k) === -1);
let additionalOnlyKeys = additionalKeys.filter(k => keysInBoth.indexOf(k) === -1);
let result = {} as Record<string, Function | Function[]>;
for (let k of existingOnlyKeys) result[k] = existing[k];
for (let k of additionalOnlyKeys) {
let addition = additional[k];
if (typeof addition === 'function') {
result[k] = [addition];
continue;
}
// For this key, there is no existing behaviour to override, so any references to 'super' can simply be elided.
result[k] = addition.filter(a => a !== 'super') as Function[];
}
// TODO: shouldn't need to specify 'super' if decorators are being merged into methods with same key
for (let k of keysInBoth) {
let existingVal = existing[k];
let additionVal = additional[k];
if (!Array.isArray(additionVal) || additionVal.filter(m => m === 'super').length !== 1) {
throw new Error(`Override must be an array with exactly one element containing the value 'super'`);
}
let superIndex = additionVal.indexOf('super');
let pre = additionVal.slice(0, superIndex) as Function[];
let post = additionVal.slice(superIndex + 1) as Function[];
result[k] = pre.concat(existingVal, post);
}
return result;
}
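// Hedged usage sketch of the 'super' splice handled above (handler names illustrative):
//   mm.extend({foo: [before, 'super', after]})
// runs `before`, then the existing 'foo' chain, then `after`.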
<|fim▁hole|>
// // TODO: temp testing...
// declare const next: never;
// let mm1 = new Multimethod((a: number, b: string) => `/${a}/${b}`);
// mm1 = mm1.extend({foo: async () => 'foo'});
// let x1a = mm1(1, 'sdsd');
// let mm2 = mm1.extend({
// '/foo': async () => 'hi',
// '/bar': async () => next,
// '/baz': () => 'baz',
// });
// let mm2b = mm2.decorate({
// '**': (_, method, args) => 'asd' || method(...args),
// });
// let x2a = mm2(3, 'asda');
// let mm3 = mm2.extend({
// '/foo/*': async (_, a, b) => 'hi hi',
// '/foo/*/*': [() => 'hi hi hi'],
// '/{**path}': ({path}, a, b) => `/${a}/${b}${path}`,
// '/thing/{name}': () => next, // TODO: was... `/${a}/${b}${this.captures.name}`; },
// });
// let x3a = mm3(3, 'asda');
// let mm4a = mm1.extend({foo: () => 'foo'});
// let mm4b = mm1.extend({foo: () => 42});
// let mm4c = mm4a.extend({foo: () => 42});
// let mm4d = mm4c.extend({foo: async () => next});
// mm4a = mm4b;
// mm4a = mm4c;
// mm4b = mm4a;
// mm4b = mm4c;
// mm4c = mm4a;
// mm4c = mm4b;
// let x4a = mm4a(42, 'foo');
// let x4b = mm4b(42, 'foo');
// let mm5 = mm2.extend({
// '/foo': async () => next,
// '/bar': async () => 42,
// });
// let x5a = mm5(3, 'asda');
// let mm6 = mm4b.extend({
// '/foo': () => next,
// '/bar': () => 'foo',
// });
// let x6a = mm6(3, 'asda');
// let mm7 = new Multimethod({});
// let x7a = mm7.extend({foo: [
// (_, a, b) => 'bar',
// (_, a, b) => 'baz',
// 'super',
// ]})();
// let mm8a = new Multimethod(async (a: number, b: string) => `/${a}/${b}`);
// let mm8b = mm8a.extend({'/foo/*': () => 'foo'});
// let mm8c = mm8a.extend({'/bar/*': () => 'bar'}).decorate({'**': () => 'foo'});
// let x8b1 = mm8b(1, 'sdsd');
// let mm9 = mm2.extend({
// '/foo/*': async (x, a, b) => 'hi hi',
// '/bar/*': async (a, b) => 'hi hi',
// '/foo/*/*': [() => 'hi hi hi'],
// '/{**path}': ({path}, a, b) => `/${a}/${b}${path}`,
// '/thing/{name}': (x, y, z) => next, // TODO: was... `/${a}/${b}${this.captures.name}`; },
// });
// let x9a = mm9(3, 'asda');<|fim▁end|> | |
<|file_name|>Process58TestElement.py<|end_file_name|><|fim▁begin|># Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.oval_5.defs.windows.TestType import TestType
logger = logging.getLogger(__name__)
class Process58TestElement(TestType):
MODEL_MAP = {
'tag_name': 'process58_test',
}<|fim▁end|> | # the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful, |
<|file_name|>MessageIntegerOverflowException.java<|end_file_name|><|fim▁begin|>//
// MessagePack for Java
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and<|fim▁hole|>
import java.math.BigInteger;
/**
* This error is thrown when the user tries to read an integer value
* using a smaller type. For example, calling MessageUnpacker.unpackInt() for an integer value
* that is larger than Integer.MAX_VALUE will cause this exception.
*/
public class MessageIntegerOverflowException
extends MessageTypeException
{
private final BigInteger bigInteger;
public MessageIntegerOverflowException(BigInteger bigInteger)
{
super();
this.bigInteger = bigInteger;
}
public MessageIntegerOverflowException(long value)
{
this(BigInteger.valueOf(value));
}
public MessageIntegerOverflowException(String message, BigInteger bigInteger)
{
super(message);
this.bigInteger = bigInteger;
}
public BigInteger getBigInteger()
{
return bigInteger;
}
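// Hedged caller-side sketch (the unpacker variable is illustrative): fall back to
// the full BigInteger value when the narrow type overflows:
//   try { int v = unpacker.unpackInt(); }
//   catch (MessageIntegerOverflowException e) { BigInteger big = e.getBigInteger(); }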
@Override
public String getMessage()
{
return bigInteger.toString();
}
}<|fim▁end|> | // limitations under the License.
//
package org.msgpack.core; |
<|file_name|>segment_hook.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#<|fim▁hole|># Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
"""
This module contains a Segment Hook
which allows you to connect to your Segment account,
retrieve data from it or write to it.
NOTE: this hook also relies on the Segment analytics package:
https://github.com/segmentio/analytics-python
"""
import analytics
from airflow.hooks.base_hook import BaseHook
from airflow.exceptions import AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin
class SegmentHook(BaseHook, LoggingMixin):
def __init__(
self,
segment_conn_id='segment_default',
segment_debug_mode=False,
*args,
**kwargs
):
"""
Creates a new connection to Segment
and allows you to pull data out of Segment or write to it.
You can then use other
Airflow operators to move the data around or interact with Segment.
:param segment_conn_id: the name of the connection that has the parameters
we need to connect to Segment.
The connection should be type `json` and include a
write_key security token in the `Extras` field.
:type segment_conn_id: str
:param segment_debug_mode: Determines whether Segment should run in debug mode.
Defaults to False
:type segment_debug_mode: boolean
.. note::
You must include a JSON structure in the `Extras` field.
We need a user's security token to connect to Segment.
So we define it in the `Extras` field as:
`{"write_key":"YOUR_SECURITY_TOKEN"}`
"""
self.segment_conn_id = segment_conn_id
self.segment_debug_mode = segment_debug_mode
self._args = args
self._kwargs = kwargs
# get the connection parameters
self.connection = self.get_connection(self.segment_conn_id)
self.extras = self.connection.extra_dejson
self.write_key = self.extras.get('write_key')
if self.write_key is None:
raise AirflowException('No Segment write key provided')
def get_conn(self):
self.log.info('Setting write key for Segment analytics connection')
analytics.debug = self.segment_debug_mode
if self.segment_debug_mode:
self.log.info('Setting Segment analytics connection to debug mode')
analytics.on_error = self.on_error
analytics.write_key = self.write_key
return analytics
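# Hedged usage sketch (hypothetical task code):
#   hook = SegmentHook(segment_conn_id='segment_default')
#   hook.get_conn().track('some-user-id', 'Some Event')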
def on_error(self, error, items):
"""
Handles error callbacks when using Segment with segment_debug_mode set to True
"""
self.log.error('Encountered Segment error: {segment_error} with '
'items: {with_items}'.format(segment_error=error,
with_items=items))
raise AirflowException('Segment error: {}'.format(error))<|fim▁end|> | # http://www.apache.org/licenses/LICENSE-2.0
# |
<|file_name|>mdbook.rs<|end_file_name|><|fim▁begin|>use std::path::{Path, PathBuf};
use std::fs::{self, File};
use std::io::Write;
use std::error::Error;
use {BookConfig, BookItem, theme, parse, utils};
use book::BookItems;
use renderer::{Renderer, HtmlHandlebars};
use utils::{PathExt, create_path};
pub struct MDBook {
config: BookConfig,
pub content: Vec<BookItem>,
renderer: Box<Renderer>,
}
impl MDBook {
/// Create a new `MDBook` struct with root directory `root`
///
/// - The default source directory is set to `root/src`
/// - The default output directory is set to `root/book`
///
/// They can both be changed by using [`set_src()`](#method.set_src) and [`set_dest()`](#method.set_dest)
pub fn new(root: &Path) -> MDBook {
if !root.exists() || !root.is_dir() {
output!("{:?} No directory with that name", root);
}
MDBook {
content: vec![],
config: BookConfig::new(root)
.set_src(&root.join("src"))
.set_dest(&root.join("book"))
.to_owned(),
renderer: Box::new(HtmlHandlebars::new()),
}
}
/// Returns a flat depth-first iterator over the elements of the book, it returns an [BookItem enum](bookitem.html):
/// `(section: String, bookitem: &BookItem)`
///
/// ```no_run
/// # extern crate mdbook;
/// # use mdbook::MDBook;
/// # use mdbook::BookItem;
/// # use std::path::Path;
/// # fn main() {
/// # let mut book = MDBook::new(Path::new("mybook"));
/// for item in book.iter() {
/// match item {
/// &BookItem::Chapter(ref section, ref chapter) => {},
/// &BookItem::Affix(ref chapter) => {},
/// &BookItem::Spacer => {},
/// }
/// }
///
/// // would print something like this:
/// // 1. Chapter 1
/// // 1.1 Sub Chapter
/// // 1.2 Sub Chapter
/// // 2. Chapter 2
/// //
/// // etc.
/// # }
/// ```
pub fn iter(&self) -> BookItems {
BookItems {
items: &self.content[..],
current_index: 0,
stack: Vec::new(),
}
}
/// `init()` creates some boilerplate files and directories to get you started with your book.
///
/// ```text
/// book-test/
/// ├── book
/// └── src
/// ├── chapter_1.md
/// └── SUMMARY.md
/// ```
///
/// It uses the paths given as source and output directories and adds a `SUMMARY.md` and a
/// `chapter_1.md` to the source directory.
pub fn init(&mut self) -> Result<(), Box<Error>> {
debug!("[fn]: init");
if !self.config.get_root().exists() {
create_path(self.config.get_root()).unwrap();
output!("{:?} created", self.config.get_root());
}
{
let dest = self.config.get_dest();
let src = self.config.get_src();
if !dest.exists() {
debug!("[*]: {:?} does not exist, trying to create directory", dest);
try!(fs::create_dir(&dest));
}
if !src.exists() {
debug!("[*]: {:?} does not exist, trying to create directory", src);
try!(fs::create_dir(&src));
}
let summary = src.join("SUMMARY.md");
if !summary.exists() {
// Summary does not exist, create it
debug!("[*]: {:?} does not exist, trying to create SUMMARY.md", src.join("SUMMARY.md"));
let mut f = try!(File::create(&src.join("SUMMARY.md")));
debug!("[*]: Writing to SUMMARY.md");
try!(writeln!(f, "# Summary"));
try!(writeln!(f, ""));
try!(writeln!(f, "- [Chapter 1](./chapter_1.md)"));
}
}
// parse SUMMARY.md, and create the missing item related file
try!(self.parse_summary());
debug!("[*]: constructing paths for missing files");
for item in self.iter() {
debug!("[*]: item: {:?}", item);
match *item {
BookItem::Spacer => continue,
BookItem::Chapter(_, ref ch) | BookItem::Affix(ref ch) => {
if ch.path != PathBuf::new() {
let path = self.config.get_src().join(&ch.path);
if !path.exists() {
debug!("[*]: {:?} does not exist, trying to create file", path);
try!(::std::fs::create_dir_all(path.parent().unwrap()));
let mut f = try!(File::create(path));
//debug!("[*]: Writing to {:?}", path);
try!(writeln!(f, "# {}", ch.name));
}
}
}
}
}
debug!("[*]: init done");
Ok(())
}
/// The `build()` method is the one where everything happens. First it parses `SUMMARY.md` to
/// construct the book's structure in the form of a `Vec<BookItem>` and then calls `render()`
/// method of the current renderer.
///
/// It is the renderer who generates all the output files.
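///
/// ```no_run
/// # extern crate mdbook;
/// # use mdbook::MDBook;
/// # use std::path::Path;
/// # fn main() {
/// // Minimal sketch: parse SUMMARY.md and render the book into the output directory.
/// let mut book = MDBook::new(Path::new("mybook"));
/// book.build().unwrap();
/// # }
/// ```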
pub fn build(&mut self) -> Result<(), Box<Error>> {
debug!("[fn]: build");
try!(self.init());
// Clean output directory
try!(utils::remove_dir_content(&self.config.get_dest()));
try!(self.renderer.render(&self));
Ok(())
}
pub fn copy_theme(&self) -> Result<(), Box<Error>> {
debug!("[fn]: copy_theme");
let theme_dir = self.config.get_src().join("theme");
if !theme_dir.exists() {
debug!("[*]: {:?} does not exist, trying to create directory", theme_dir);
try!(fs::create_dir(&theme_dir));
}
// index.hbs
let mut index = try!(File::create(&theme_dir.join("index.hbs")));
try!(index.write_all(theme::INDEX));
// book.css
let mut css = try!(File::create(&theme_dir.join("book.css")));
try!(css.write_all(theme::CSS));
// book.js
let mut js = try!(File::create(&theme_dir.join("book.js")));
try!(js.write_all(theme::JS));
// highlight.css
let mut highlight_css = try!(File::create(&theme_dir.join("highlight.css")));
try!(highlight_css.write_all(theme::HIGHLIGHT_CSS));
// highlight.js
let mut highlight_js = try!(File::create(&theme_dir.join("highlight.js")));
try!(highlight_js.write_all(theme::HIGHLIGHT_JS));
Ok(())
}
/// Parses the `book.json` file (if it exists) to extract the configuration parameters.
/// The `book.json` file should be in the root directory of the book.
/// The root directory is the one specified when creating a new `MDBook`
///
/// ```no_run
/// # extern crate mdbook;
/// # use mdbook::MDBook;
/// # use std::path::Path;
/// # fn main() {
/// let mut book = MDBook::new(Path::new("root_dir"));
/// # }
/// ```
///
/// In this example, `root_dir` will be the root directory of our book; it is resolved
/// relative to the current working directory because a relative path is used instead of an absolute one.
pub fn read_config(mut self) -> Self {
let root = self.config.get_root().to_owned();
self.config.read_config(&root);
self
}
/// You can change the default renderer to another one by using this method. The only requirement
/// is for your renderer to implement the [Renderer trait](../../renderer/renderer/trait.Renderer.html)
///
/// ```no_run
/// extern crate mdbook;
/// use mdbook::MDBook;
/// use mdbook::renderer::HtmlHandlebars;
/// # use std::path::Path;
///
/// fn main() {
/// let mut book = MDBook::new(Path::new("mybook"))
/// .set_renderer(Box::new(HtmlHandlebars::new()));
///
/// // In this example we replace the default renderer by the default renderer...
/// // Don't forget to put your renderer in a Box
/// }
/// ```
///
/// **note:** Don't forget to put your renderer in a `Box` before passing it to `set_renderer()`
pub fn set_renderer(mut self, renderer: Box<Renderer>) -> Self {
self.renderer = renderer;
self
}
pub fn set_dest(mut self, dest: &Path) -> Self {
// Handle absolute and relative paths
match dest.is_absolute() {
true => { self.config.set_dest(dest); },
false => {
let dest = self.config.get_root().join(dest).to_owned();
self.config.set_dest(&dest);
}
}
self
}
pub fn get_dest(&self) -> &Path {
self.config.get_dest()
}
pub fn set_src(mut self, src: &Path) -> Self {
// Handle absolute and relative paths
match src.is_absolute() {
true => { self.config.set_src(src); },
false => {
let src = self.config.get_root().join(src).to_owned();
self.config.set_src(&src);
}
}
self
}
pub fn get_src(&self) -> &Path {
self.config.get_src()
}
pub fn set_title(mut self, title: &str) -> Self {
self.config.title = title.to_owned();
self
}<|fim▁hole|> }
pub fn set_author(mut self, author: &str) -> Self {
self.config.author = author.to_owned();
self
}
pub fn get_author(&self) -> &str {
&self.config.author
}
// Construct book
fn parse_summary(&mut self) -> Result<(), Box<Error>> {
// When append becomes stable, use self.content.append() ...
self.content = try!(parse::construct_bookitems(&self.config.get_src().join("SUMMARY.md")));
Ok(())
}
}<|fim▁end|> |
pub fn get_title(&self) -> &str {
&self.config.title |
<|file_name|>import_from_imilo.ts<|end_file_name|><|fim▁begin|>import convertImiloPropsToBobProps from './imilo'
import {ImiloProps, getCoords, getDegrees, getIdentity, getMobility,
getSituations} from './imilo_api'
type ImiloPropsFetcher = {
[K in keyof ImiloProps]: (userId: string) => Promise<ImiloProps[K]>
}
async function getImiloPropsFromAllPages(
userId: string, onPageComplete: (pageName: string) => void): Promise<ImiloProps> {
// Get all the URLs that contain part of the i-milo user data (user 'Dossier').
const pageApis: ImiloPropsFetcher = {
Coordonnées: getCoords,
Cursus: getDegrees,
Identité: getIdentity,
Mobilité: getMobility,
Situations: getSituations,
}
// On each page collect the i-milo user data.
const imiloPropsFromAllPages: Partial<ImiloProps> = {}
// Chain loading all pages one after the other.
await (Object.keys(pageApis) as readonly (keyof ImiloProps)[]).reduce(
async <K extends keyof ImiloProps>(iterateOverPreviousPages: Promise<void>, pageName: K) => {
await iterateOverPreviousPages
imiloPropsFromAllPages[pageName] = await pageApis[pageName](userId) as ImiloProps[K]
// Callback to get opportunity to show progress done.
onPageComplete(pageName)
},
Promise.resolve(),
)
return imiloPropsFromAllPages as ImiloProps
}
const BOB_BOOTSTRAP_ADVICES_ENDPOINT =
`${config.canonicalUrl}/conseiller/nouveau-profil-et-projet#`
function openAdvicesPageForBobProps(bobProps: bayes.bob.User): void {
window.open(<|fim▁hole|>
function openImiloModal(title: string, bodyElement: HTMLElement, okLabel: string): HTMLElement {
const modal = document.createElement('div')
modal.innerHTML =
`<div id="confirmDialog" class="modalContainer modal modal-confirm" tabindex="-1"
role="dialog" aria-labelledby="myModalLabel" aria-hidden="true">
<div class="modal-header">
<button type="button" class="close" aria-hidden="true">×</button>
<h2>{{title}}</h2>
</div>
<div class="modal-body">
</div>
<div class="modal-footer">
<div class="centered">
<button class="btn btn-primary" id="btnOk">{{okLabel}}</button>
<button class="btn btn-secondary" id="btnCancel">Annuler</button>
</div>
</div>
</div>`
document.body.appendChild(modal)
const h2 = modal.querySelector('h2')
if (h2) {
h2.textContent = title
}
const modalBody = modal.querySelector('.modal-body')
if (modalBody) {
modalBody.appendChild(bodyElement)
}
const okButton = modal.querySelector('button#btnOk') as HTMLElement
okButton.textContent = okLabel
const closeModal = (): void => {
if (modal.parentNode) {
modal.parentNode.removeChild(modal)
}
}
const closeButton = modal.querySelector('button.close') as HTMLElement
const cancelButton = modal.querySelector('button#btnCancel') as HTMLElement
closeButton.onclick = closeModal
cancelButton.onclick = closeModal
return okButton
}
async function startImportProcess(): Promise<void> {
const pathnameMatch = window.location.pathname.match(/^\/dossier\/([^/]+)\//)
if (!pathnameMatch) {
// eslint-disable-next-line no-console
console.log("Impossible de trouver la référence du dossier dans l'URL")
return
}
const userId = pathnameMatch[1]
if (!userId) {
// This can never happen, the regexp always has a matching group.
return
}
const bodyElement = document.createElement('div')
bodyElement.innerHTML =
`<div>
<h5>Recherche dans i-milo des données pour personnaliser les conseils :</h5>
<div class="bob-loading" />
</div>`
const okButton = openImiloModal(
'Création de conseils personnalisés avec Bob',
bodyElement, 'Ouvrir la page de conseils de Bob')
// Hide until the imilo props are collected.
okButton.style.display = 'none'
const loadingElement = bodyElement.querySelectorAll('div.bob-loading')[0]
const updateModalToShowCompletedPage = (): void => {
if (loadingElement) {
loadingElement.textContent += '.'
}
}
const updateModalToShowDataReadyForBob = (bobProps: bayes.bob.User): void => {
if (loadingElement) {
loadingElement.textContent += ' ✅'
}
const bobPropsJson = JSON.stringify(bobProps, null, 2).
replace(/[",[\]{}]/g, '').
split('\n').filter((line: string): string => line.trim()).join('\n')
bodyElement.innerHTML +=
`<h5>Données que Bob va utiliser pour son diagnostic :</h5>
<textarea
readonly style="width:100%;box-sizing:border-box;height:290px">${bobPropsJson}</textarea>`
// Enable modal button to open Bob.
okButton.onclick = (): void => {
openAdvicesPageForBobProps(bobProps)
}
okButton.style.display = ''
}
const imiloProps = await getImiloPropsFromAllPages(userId, updateModalToShowCompletedPage)
const bobProps = convertImiloPropsToBobProps(imiloProps)
updateModalToShowDataReadyForBob(bobProps)
}
export default startImportProcess<|fim▁end|> | BOB_BOOTSTRAP_ADVICES_ENDPOINT + encodeURIComponent(JSON.stringify(bobProps)), '_blank',
'noopener noreferrer')
}
|
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>/* License: MIT.
* Copyright (C) 2013, 2014, Uri Shaked.
*/
'use strict';
<|fim▁hole|>
grunt.initConfig({
karma: {
unit: {
configFile: 'karma.conf.js',
singleRun: true
}
},
jshint: {
options: {
jshintrc: '.jshintrc'
},
all: [
'Gruntfile.js',
'angular-moment.js',
'tests.js'
]
},
uglify: {
dist: {
files: {
'angular-moment.min.js': 'angular-moment.js'
}
}
}
});
grunt.registerTask('test', [
'jshint',
'karma'
]);
grunt.registerTask('build', [
'jshint',
'uglify'
]);
grunt.registerTask('default', ['build']);
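// Typical invocations (assuming grunt-cli is installed): `grunt test` runs
// jshint + karma; `grunt` or `grunt build` runs jshint + uglify.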
};<|fim▁end|> | module.exports = function (grunt) {
// load all grunt tasks
require('matchdep').filterDev('grunt-*').forEach(grunt.loadNpmTasks);
|
<|file_name|>lint-unused-imports.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>#![allow(dead_code)]
#![allow(deprecated_owned_vector)]
use cal = bar::c::cc;
use std::mem::*; // shouldn't get errors for not using
// everything imported
// Should get errors for both 'Some' and 'None'
use std::option::{Some, None}; //~ ERROR unused import
//~^ ERROR unused import
use test::A; //~ ERROR unused import
// Be sure that if we just bring some methods into scope that they're also
// counted as being used.
use test::B;
// Make sure this import is warned about when at least one of its imported names
// is unused
use std::slice::{from_fn, from_elem}; //~ ERROR unused import
mod test {
pub trait A { fn a(&self) {} }
pub trait B { fn b(&self) {} }
pub struct C;
impl A for C {}
impl B for C {}
}
mod foo {
pub struct Point{x: int, y: int}
pub struct Square{p: Point, h: uint, w: uint}
}
mod bar {
// Don't ignore on 'pub use' because we're not sure if it's used or not
pub use std::cmp::Eq;
pub mod c {
use foo::Point;
use foo::Square; //~ ERROR unused import
pub fn cc(p: Point) -> int { return 2 * (p.x + p.y); }
}
#[allow(unused_imports)]
mod foo {
use std::cmp::Eq;
}
}
fn main() {
cal(foo::Point{x:3, y:9});
let mut a = 3;
let mut b = 4;
swap(&mut a, &mut b);
test::C.b();
let _a = from_elem(0, 0);
}<|fim▁end|> | // except according to those terms.
#![feature(globs)]
#![deny(unused_imports)] |
<|file_name|>instance_test.cc<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "clif/python/instance.h"
#include "gtest/gtest.h"
namespace clif {
class PrivateDestructor {
public:
PrivateDestructor() = default;
PrivateDestructor(const PrivateDestructor& other) = delete;
PrivateDestructor& operator=(const PrivateDestructor& other) = delete;
void Delete() { delete this; }
private:
~PrivateDestructor() = default;
};
class MyData {
public:
int a_, b_, c_;
};
TEST(InstanceTest, TestCreationFromRawPointerOwn) {
Instance<MyData> csp1(new MyData, OwnedResource());
std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
EXPECT_TRUE(up1);
EXPECT_FALSE(csp1);
EXPECT_TRUE(csp1 == nullptr);
Instance<MyData> csp2(up1.release(), OwnedResource());
std::shared_ptr<MyData> sp = MakeStdShared(csp2);
std::unique_ptr<MyData> up2 = MakeStdUnique(&csp2);
EXPECT_FALSE(up2);
EXPECT_TRUE(csp2);
EXPECT_TRUE(sp);
EXPECT_TRUE(csp2 != nullptr);
}
TEST(InstanceTest, TestCreationFromRawPointerNotOwn) {
std::unique_ptr<MyData> up(new MyData);
Instance<MyData> csp1(up.get(), UnOwnedResource());
std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
EXPECT_FALSE(up1);
EXPECT_TRUE(csp1);
std::shared_ptr<MyData> sp = MakeStdShared(csp1);<|fim▁hole|>}
TEST(InstanceTest, TestCreateUnownedPrivateDestructor) {
PrivateDestructor* obj = new PrivateDestructor();
Instance<PrivateDestructor> shared(obj, UnOwnedResource());
EXPECT_FALSE(shared == nullptr);
shared.Destruct();
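// Sketch of the intended contract: for unowned resources Destruct() only drops the
// wrapper's reference; the caller still frees the object itself below.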
obj->Delete();
}
TEST(InstanceTest, TestCreationFromUniquePointer) {
std::unique_ptr<MyData> up(new MyData);
Instance<MyData> csp1(std::move(up));
EXPECT_FALSE(up);
std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
EXPECT_TRUE(up1);
EXPECT_FALSE(csp1);
Instance<MyData> csp2(move(up1));
std::shared_ptr<MyData> sp = MakeStdShared(csp2);
std::unique_ptr<MyData> up2 = MakeStdUnique(&csp2);
EXPECT_FALSE(up2);
EXPECT_TRUE(csp2);
EXPECT_TRUE(sp);
}
TEST(InstanceTest, TestCreationFromUniquePointerWithDefaultDeleter) {
std::unique_ptr<MyData, std::default_delete<MyData>> up(new MyData);
EXPECT_TRUE(up);
Instance<MyData> csp3(move(up));
EXPECT_FALSE(up);
EXPECT_TRUE(csp3);
}
TEST(InstanceTest, TestCreationFromSharedPointer) {
std::shared_ptr<MyData> sp1(new MyData);
Instance<MyData> csp1(sp1);
EXPECT_TRUE(sp1);
EXPECT_TRUE(csp1);
std::unique_ptr<MyData> up1 = MakeStdUnique(&csp1);
EXPECT_FALSE(up1);
EXPECT_TRUE(sp1);
EXPECT_TRUE(csp1);
std::shared_ptr<MyData> sp2 = MakeStdShared(csp1);
std::unique_ptr<MyData> up2 = MakeStdUnique(&csp1);
EXPECT_FALSE(up2);
EXPECT_TRUE(csp1);
EXPECT_TRUE(sp2);
}
} // namespace clif<|fim▁end|> | std::unique_ptr<MyData> up2 = MakeStdUnique(&csp1);
EXPECT_FALSE(up2);
EXPECT_TRUE(csp1);
EXPECT_TRUE(sp); |
<|file_name|>flowerNet.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
AUTHOR : MIN
PURPOSE : the deep learning CNN model, similar to Inception
VERSION : 0.1
DATE : 4.2017
"""
__author__ = 'Min'
import math
import time
import tensorflow as tf
from datetime import datetime
NUM_CLASSES = 50
slim = tf.contrib.slim
# produce a truncated normal distribution initializer
trunc_normal = lambda stddev: tf.truncated_normal_initializer(0.0, stddev)
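# e.g. trunc_normal(0.01) yields an initializer drawing from N(0, 0.01^2),
# re-sampling any value beyond two standard deviations (TF truncated normal).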
# Generate default parameters
def flowerNet_arg_scope(weight_decay = 0.00004, stddev = 0.1,
batch_norm_var_collection = 'moving_vars'):
batch_norm_params = {
# decay coefficient for the batch-norm moving averages
'decay': 0.9997,
'epsilon': 0.001,
'updates_collections': tf.GraphKeys.UPDATE_OPS,
'variables_collections': {
'beta': None,
'gamma': None,
'moving_mean': [batch_norm_var_collection],
'moving_variance': [batch_norm_var_collection],
}
}
# auto assign default values
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_regularizer = slim.l2_regularizer(weight_decay)):
with slim.arg_scope([slim.conv2d],
weights_initializer = tf.truncated_normal_initializer(stddev = stddev),
activation_fn = tf.nn.relu, #Activation function
normalizer_fn = slim.batch_norm,
normalizer_params = batch_norm_params) as scope:
return scope
# generate the convolutional and pooling layers of the CNN
def flowerNet_base(inputs, scope = None):
end_points = {}
with tf.variable_scope(scope, 'Inception', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride = 1, padding = 'VALID'):
# convolutional layer 3x3/2 32
net = slim.conv2d(inputs, 32, [3, 3], stride = 2, scope = 'conv')
# convolutional layer 3x3/1 32
net = slim.conv2d(net, 32, [3, 3], scope = 'conv_1')
# convolutional layer 3x3/1 64
net = slim.conv2d(net, 64, [3, 3], padding= 'SAME',
scope = 'conv_2')
# max pool layer 3x3/2
net = slim.max_pool2d(net, [3, 3], stride = 2, scope = 'pool')
# convolutional layer 1x1/1 80
net = slim.conv2d(net, 80, [1, 1], scope = 'conv_3')
# convolutional layer 3x3/1 192
net = slim.conv2d(net, 192, [3, 3], scope = 'conv_4')
# max pool layer 3,3/2
net = slim.max_pool2d(net, [3, 3], stride = 2, scope = 'pool_1')
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride = 1, padding = 'SAME'):
# mixed module 1
with tf.variable_scope('mixed'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 48, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 64, [5, 5], scope = 'conv1')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv1')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv2')<|fim▁hole|> with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 32, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 2
with tf.variable_scope('mixed_1'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 48, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 64, [5, 5], scope = 'conv1')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv1')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv2')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 64, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 3
with tf.variable_scope('mixed_2'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 48, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 64, [5, 5], scope = 'conv1')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv1')
branch2 = slim.conv2d(branch2, 96, [3, 3], scope = 'conv2')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 64, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 4
with tf.variable_scope('mixed_3'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 384, [3, 3], stride = 2,
padding = 'VALID', scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 64, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 96, [3, 3], scope = 'conv1')
branch1 = slim.conv2d(branch1, 96, [3, 3], stride = 2,
padding = 'VALID', scope = 'conv2')
with tf.variable_scope('branch2'):
branch2 = slim.max_pool2d(net, [3, 3], stride = 2,
padding = 'VALID', scope = 'maxPool')
net = tf.concat([branch0, branch1, branch2], 3)
# mixed module 5
with tf.variable_scope('mixed_4'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 128, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 128, [1, 7], scope = 'conv1')
branch1 = slim.conv2d(branch1, 192, [7, 1], scope = 'conv2')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 128, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 128, [7, 1], scope = 'conv1')
branch2 = slim.conv2d(branch2, 128, [1, 7], scope = 'conv2')
branch2 = slim.conv2d(branch2, 128, [7, 1], scope = 'conv3')
branch2 = slim.conv2d(branch2, 192, [1, 7], scope = 'conv4')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 6
with tf.variable_scope('mixed_5'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 160, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 160, [1, 7], scope = 'conv1')
branch1 = slim.conv2d(branch1, 192, [7, 1], scope = 'conv2')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 160, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 160, [7, 1], scope = 'conv1')
branch2 = slim.conv2d(branch2, 160, [1, 7], scope = 'conv2')
branch2 = slim.conv2d(branch2, 160, [7, 1], scope = 'conv3')
branch2 = slim.conv2d(branch2, 192, [1, 7], scope = 'conv4')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 7
with tf.variable_scope('mixed_6'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 160, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 160, [1, 7], scope = 'conv1')
branch1 = slim.conv2d(branch1, 192, [7, 1], scope = 'conv2')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 160, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 160, [7, 1], scope = 'conv1')
branch2 = slim.conv2d(branch2, 160, [1, 7], scope = 'conv2')
branch2 = slim.conv2d(branch2, 160, [7, 1], scope = 'conv3')
branch2 = slim.conv2d(branch2, 192, [1, 7], scope = 'conv4')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 8
with tf.variable_scope('mixed_7'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 192, [1, 7], scope = 'conv1')
branch1 = slim.conv2d(branch1, 192, [7, 1], scope = 'conv2')
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 192, [7, 1], scope = 'conv1')
branch2 = slim.conv2d(branch2, 192, [1, 7], scope = 'conv2')
branch2 = slim.conv2d(branch2, 192, [7, 1], scope = 'conv3')
branch2 = slim.conv2d(branch2, 192, [1, 7], scope = 'conv4')
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
end_points['mixed_7'] = net
# mixed module 9
with tf.variable_scope('mixed_8'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
branch0 = slim.conv2d(branch0, 320, [3, 3], stride = 2,
padding = 'VALID', scope = 'conv1')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 192, [1, 1], scope = 'conv')
branch1 = slim.conv2d(branch1, 192, [1, 7], scope = 'conv1')
branch1 = slim.conv2d(branch1, 192, [7, 1], scope = 'conv2')
branch1 = slim.conv2d(branch1, 192, [3, 3], stride = 2,
padding = 'VALID', scope = 'conv3')
with tf.variable_scope('branch2'):
branch2 = slim.max_pool2d(net, [3, 3], stride = 2,
padding = 'VALID', scope = 'maxPool')
net = tf.concat([branch0, branch1, branch2], 3)
# mixed module 10
with tf.variable_scope('mixed_9'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 320, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 384, [1, 1], scope = 'conv')
branch1 = tf.concat([
slim.conv2d(branch1, 384, [1, 3], scope = 'conv1'),
slim.conv2d(branch1, 384, [3, 1], scope = 'conv2')], 3)
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 448, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 384, [3, 3], scope = 'conv1')
branch2 = tf.concat([
slim.conv2d(branch2, 384, [1, 3], scope = 'conv2'),
slim.conv2d(branch2, 384, [3, 1], scope = 'conv3')], 3)
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
# mixed module 11
with tf.variable_scope('mixed_10'):
with tf.variable_scope('branch0'):
branch0 = slim.conv2d(net, 320, [1, 1], scope = 'conv')
with tf.variable_scope('branch1'):
branch1 = slim.conv2d(net, 384, [1, 1], scope = 'conv')
branch1 = tf.concat([
slim.conv2d(branch1, 384, [1, 3], scope = 'conv1'),
slim.conv2d(branch1, 384, [3, 1], scope = 'conv2')], 3)
with tf.variable_scope('branch2'):
branch2 = slim.conv2d(net, 448, [1, 1], scope = 'conv')
branch2 = slim.conv2d(branch2, 384, [3, 3], scope = 'conv1')
branch2 = tf.concat([
slim.conv2d(branch2, 384, [1, 3], scope = 'conv2'),
slim.conv2d(branch2, 384, [3, 1], scope = 'conv3')], 3)
with tf.variable_scope('branch3'):
branch3 = slim.avg_pool2d(net, [3, 3], scope = 'avgPool')
branch3 = slim.conv2d(branch3, 192, [1, 1], scope = 'conv')
net = tf.concat([branch0, branch1, branch2, branch3], 3)
return net, end_points
# global avg pool and softmax and logits
def flowerNet(inputs, numClasses, isTraining = True,
dropoutKeepProb = 0.8, predictionFN = slim.softmax,
spatialSqueeze = True, reuse = None, scope = 'flowerNet'):
with tf.variable_scope(scope, 'flowerNet', [inputs, numClasses],
reuse = reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training = isTraining):
net, end_points = flowerNet_base(inputs, scope = scope)
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride = 1, padding = 'SAME'):
aux_logits = end_points['mixed_7']
with tf.variable_scope('AuxLogits'):
aux_logits = slim.avg_pool2d(aux_logits,
[5, 5], stride = 3,
padding = 'VALID', scope = 'avgPool')
aux_logits = slim.conv2d(aux_logits, 128, [1, 1], scope = 'conv')
aux_logits = slim.conv2d(aux_logits, 768, [5, 5],
weights_initializer = trunc_normal(0.01),
padding = 'VALID', scope = 'conv1')
aux_logits = slim.conv2d(aux_logits, numClasses, [1, 1],
activation_fn = None,
normalizer_fn = None,
weights_initializer = trunc_normal(0.001),
scope = 'conv2')
if spatialSqueeze:
aux_logits = tf.squeeze(aux_logits, [1, 2], name = 'SpatialSqueeze')
end_points['AuxLogits'] = aux_logits
with tf.variable_scope('Logits'):
net = slim.avg_pool2d(net, [8, 8], padding = 'VALID',
scope = 'avgPool')
net = slim.dropout(net, keep_prob = dropoutKeepProb,
scope = 'dropout')
end_points['PreLogits'] = net
logits = slim.conv2d(net, numClasses, [1, 1], activation_fn = None,
normalizer_fn = None, scope = 'conv')
if spatialSqueeze:
logits = tf.squeeze(logits, [1, 2], name = 'SpatialSqueeze')
end_points['Logits'] = logits
end_points['Predictions'] = predictionFN(logits, scope = 'Predictions')
return logits, end_points
def time_test(session, target, info_string):
num_steps_burn = 10
total_duration = 0.0
total_duration_squared = 0.0
for i in range(num_batches + num_steps_burn):
startTime = time.time()
_ = session.run(target)
duration = time.time() - startTime
if i >= num_steps_burn:
if not i % 10:
print('%s: step %d, duration = %.3f' % (datetime.now(),
i - num_steps_burn, duration))
total_duration += duration
total_duration_squared += duration * duration
mn = total_duration / num_batches
vr = total_duration_squared / num_batches - mn * mn
sd = math.sqrt(vr)
print('%s: %s across %d steps, %.3f +/- %.3f sec / batch' %
(datetime.now(), info_string, num_batches, mn, sd))
if __name__ == '__main__':
batchSize = 100
height, width = 299, 299
inputs = tf.random_uniform((batchSize, height, width, 3))
with slim.arg_scope(flowerNet_arg_scope()):
logits, end_points = flowerNet(inputs, isTraining = False,
numClasses = NUM_CLASSES)
init = tf.global_variables_initializer()
with tf.Session() as sess:
sess.run(init)
num_batches = 100
time_test(sess, logits, "Forward")
#END<|fim▁end|> | |
<|file_name|>lxqt-session_fr_FR.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr_FR">
<context>
<name>LXQtModuleManager</name>
<message>
<source>LXQt Session Crash Report</source>
<translation type="vanished">Rapport de plantage de session LXQt</translation>
</message>
<message>
<source>Application '%1' crashed too many times. Its autorestart has been disabled for current session.</source>
<translation type="vanished">L'application '%1' a planté trop souvent. Son redémarrage automatique a été désactivé pour la session courante.</translation>
</message>
<message>
<location filename="../src/lxqtmodman.cpp" line="289"/>
<source>Crash Report</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/lxqtmodman.cpp" line="290"/>
<source><b>%1</b> crashed too many times. Its autorestart has been disabled until next login.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WmSelectDialog</name>
<message>
<source>Welcome to LXQt</source>
<translation type="vanished">Bienvenue sur LXQt</translation>
</message>
<message>
<source><b>Welcome to LXQt</b>
<p>
Before starting to use the LXQt, you might want to select the Windows Manager:</source>
<translation type="vanished"><b>Bienvenue sur LXQt</b>
<p>
Avant de commencer à utiliser LXQt, vous voulez peut-être choisir un gestionnaire de fenêtres :</translation>
</message>
<message>
<source>You can change the Window Manager later at any time via LXQt Session Configurator.</source>
<translation type="vanished">Vous pouvez changer de gestionnaire de fenêtres à tout moment dans le paneau de configuration de LXQt.</translation>
</message>
<message>
<location filename="../src/wmselectdialog.cpp" line="62"/>
<source>Other ...</source>
<translation>Autre…</translation>
</message>
<message>
<location filename="../src/wmselectdialog.cpp" line="63"/>
<source>Choose your favorite one.</source>
<translation>Choisissez votre préféré.</translation>
</message>
<message>
<location filename="../src/wmselectdialog.ui" line="14"/>
<source>Welcome to LXQt</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../src/wmselectdialog.ui" line="20"/>
<source><html><head/><body><p><span style=" font-weight:600;">Welcome to LXQt</span></p><p>Please select your default Window Manager.</p></body></html></source>
<translation type="unfinished"></translation>
</message>
<message><|fim▁hole|></context>
</TS><|fim▁end|> | <location filename="../src/wmselectdialog.ui" line="71"/>
<source>You will be able to change this at any time through Preferences -> Session Settings -> Basic Settings.</source>
<translation type="unfinished"></translation>
</message> |
<|file_name|>in_memory_repository.py<|end_file_name|><|fim▁begin|><|fim▁hole|>class InMemoryRepository(list):
pass<|fim▁end|> | |
<|file_name|>populatedb.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
with open('clinicalsearch/trials_ranked.csv', 'rU') as csvfile:
reader = csv.reader(csvfile, delimiter=',')
for row in reader:
print row
t = ClinicalTrial(id=row[0], sponsor=row[1], published=(row[2]=="TRUE"), state=row[3], url=row[4], ongoing=(row[5]=="TRUE"), title=row[6], condition=row[7], intervention=row[8], locations=row[9], last_changed=row[10], min_age=int(row[11]), max_age=int(row[12]), genders=row[13], health=(row[14] == "True"), ranking=int(row[15]))
t.save()<|fim▁end|> | # Run this in the Django shell
from clinicalsearch.models import ClinicalTrial
import csv |
<|file_name|>inv.cpp<|end_file_name|><|fim▁begin|>#include <iostream>
#include <vector>
// merges two sorted arrays while counting the inversions between them
std::vector<int> merge(std::vector<int>& ar1,std::vector<int>& ar2,unsigned int& inv_count) {
unsigned int n=ar1.size();
unsigned int m=ar2.size();
std::vector<int> merged(n+m);
unsigned int a,b;
a=0;b=0;
for(unsigned int i=0;i<(n+m);++i) {
if(a==n) {
merged[i]=ar2[b];
b++;
} else if(b==m) {
merged[i]=ar1[a];
a++;
} else if(ar1[a]<=ar2[b]) {
merged[i]=ar1[a];
a++;
} else {
merged[i]=ar2[b];
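// every element still waiting in ar1 is greater than ar2[b],
// so each of the remaining n-a elements forms an inversion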
inv_count+=n-a;
b++;
}
}
return merged;
}
std::vector<int> mergesort(std::vector<int>& arr,unsigned int& inv_count) {
unsigned int n=arr.size();
unsigned int n1,n2;
if(n==1) return arr;
else {
n1=n/2;<|fim▁hole|> ar1[i]=arr[i];
}
for(unsigned int i=0;i<n2;++i) {
ar2[i]=arr[i+n1];
}
ar1=mergesort(ar1,inv_count);
ar2=mergesort(ar2,inv_count);
arr=merge(ar1,ar2,inv_count);
return arr;
}
}
int main() {
unsigned int n;
unsigned int n1,n2;
unsigned int inv_count=0;
std::cin >> n;
std::vector<int> org(n);
for(unsigned int i=0;i<n;++i) {
std::cin >> org[i];
}
org=mergesort(org,inv_count);
// for(auto &it: org) {
// std::cout << it << " ";
// }
// std::cout << std::endl;
std::cout << inv_count << std::endl;
return 0;
}<|fim▁end|> | n2=n-n1;
std::vector<int> ar1(n1),ar2(n2);
for(unsigned int i=0;i<n1;++i) { |
<|file_name|>ConfigDialog.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
***************************************************************************
ConfigDialog.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from PyQt4 import uic
from PyQt4.QtCore import Qt, QEvent, QPyNullVariant
from PyQt4.QtGui import (QFileDialog, QDialog, QIcon, QStyle,
QStandardItemModel, QStandardItem, QMessageBox, QStyledItemDelegate,
QLineEdit, QWidget, QToolButton, QHBoxLayout,
QComboBox)
from qgis.gui import QgsDoubleSpinBox, QgsSpinBox
from processing.core.ProcessingConfig import ProcessingConfig, Setting
from processing.core.Processing import Processing
pluginPath = os.path.split(os.path.dirname(__file__))[0]
WIDGET, BASE = uic.loadUiType(
os.path.join(pluginPath, 'ui', 'DlgConfig.ui'))
class ConfigDialog(BASE, WIDGET):
def __init__(self, toolbox):
super(ConfigDialog, self).__init__(None)
self.setupUi(self)
self.toolbox = toolbox
self.groupIcon = QIcon()
self.groupIcon.addPixmap(self.style().standardPixmap(
QStyle.SP_DirClosedIcon), QIcon.Normal, QIcon.Off)
self.groupIcon.addPixmap(self.style().standardPixmap(
QStyle.SP_DirOpenIcon), QIcon.Normal, QIcon.On)
if hasattr(self.searchBox, 'setPlaceholderText'):
self.searchBox.setPlaceholderText(self.tr('Search...'))
self.model = QStandardItemModel()
self.tree.setModel(self.model)
self.delegate = SettingDelegate()
self.tree.setItemDelegateForColumn(1, self.delegate)
self.searchBox.textChanged.connect(self.fillTree)
self.fillTree()
self.tree.expanded.connect(self.adjustColumns)
def fillTree(self):
self.items = {}
self.model.clear()
self.model.setHorizontalHeaderLabels([self.tr('Setting'),
self.tr('Value')])
text = unicode(self.searchBox.text())
settings = ProcessingConfig.getSettings()
rootItem = self.model.invisibleRootItem()
priorityKeys = [self.tr('General'), self.tr('Models'), self.tr('Scripts')]
for group in priorityKeys:
groupItem = QStandardItem(group)
icon = ProcessingConfig.getGroupIcon(group)
groupItem.setIcon(icon)
groupItem.setEditable(False)
emptyItem = QStandardItem()
emptyItem.setEditable(False)
rootItem.insertRow(0, [groupItem, emptyItem])
for setting in settings[group]:
if setting.hidden:
continue
if text == '' or text.lower() in setting.description.lower():
labelItem = QStandardItem(setting.description)
labelItem.setIcon(icon)
labelItem.setEditable(False)
self.items[setting] = SettingItem(setting)
groupItem.insertRow(0, [labelItem, self.items[setting]])
if text != '':
self.tree.expand(groupItem.index())
providersItem = QStandardItem(self.tr('Providers'))
icon = QIcon(os.path.join(pluginPath, 'images', 'alg.png'))
providersItem.setIcon(icon)
providersItem.setEditable(False)
emptyItem = QStandardItem()
emptyItem.setEditable(False)
rootItem.insertRow(0, [providersItem, emptyItem])
for group in settings.keys():
if group in priorityKeys:
continue
groupItem = QStandardItem(group)
icon = ProcessingConfig.getGroupIcon(group)
groupItem.setIcon(icon)
groupItem.setEditable(False)
for setting in settings[group]:
if setting.hidden:<|fim▁hole|> continue
if text == '' or text.lower() in setting.description.lower():
labelItem = QStandardItem(setting.description)
labelItem.setIcon(icon)
labelItem.setEditable(False)
self.items[setting] = SettingItem(setting)
groupItem.insertRow(0, [labelItem, self.items[setting]])
emptyItem = QStandardItem()
emptyItem.setEditable(False)
providersItem.appendRow([groupItem, emptyItem])
self.tree.sortByColumn(0, Qt.AscendingOrder)
self.adjustColumns()
def accept(self):
for setting in self.items.keys():
if isinstance(setting.value, bool):
setting.setValue(self.items[setting].checkState() == Qt.Checked)
else:
try:
setting.setValue(unicode(self.items[setting].text()))
except ValueError as e:
QMessageBox.warning(self, self.tr('Wrong value'),
self.tr('Wrong value for parameter "%s":\n\n%s' % (setting.description, unicode(e))))
return
setting.save()
Processing.updateAlgsList()
QDialog.accept(self)
def adjustColumns(self):
self.tree.resizeColumnToContents(0)
self.tree.resizeColumnToContents(1)
class SettingItem(QStandardItem):
def __init__(self, setting):
QStandardItem.__init__(self)
self.setting = setting
self.setData(setting, Qt.UserRole)
if isinstance(setting.value, bool):
self.setCheckable(True)
self.setEditable(False)
if setting.value:
self.setCheckState(Qt.Checked)
else:
self.setCheckState(Qt.Unchecked)
else:
self.setData(setting.value, Qt.EditRole)
class SettingDelegate(QStyledItemDelegate):
def __init__(self, parent=None):
QStyledItemDelegate.__init__(self, parent)
def createEditor(
self,
parent,
options,
index,
):
setting = index.model().data(index, Qt.UserRole)
if setting.valuetype == Setting.FOLDER:
return FileDirectorySelector(parent)
elif setting.valuetype == Setting.FILE:
return FileDirectorySelector(parent, True)
elif setting.valuetype == Setting.SELECTION:
combo = QComboBox(parent)
combo.addItems(setting.options)
return combo
else:
value = self.convertValue(index.model().data(index, Qt.EditRole))
if isinstance(value, (int, long)):
spnBox = QgsSpinBox(parent)
spnBox.setRange(-999999999, 999999999)
return spnBox
elif isinstance(value, float):
spnBox = QgsDoubleSpinBox(parent)
spnBox.setRange(-999999999.999999, 999999999.999999)
spnBox.setDecimals(6)
return spnBox
elif isinstance(value, (str, unicode)):
return QLineEdit(parent)
def setEditorData(self, editor, index):
value = self.convertValue(index.model().data(index, Qt.EditRole))
setting = index.model().data(index, Qt.UserRole)
if setting.valuetype == Setting.SELECTION:
editor.setCurrentIndex(editor.findText(value))
else:
editor.setText(value)
def setModelData(self, editor, model, index):
value = self.convertValue(index.model().data(index, Qt.EditRole))
setting = index.model().data(index, Qt.UserRole)
if setting.valuetype == Setting.SELECTION:
model.setData(index, editor.currentText(), Qt.EditRole)
else:
if isinstance(value, (str, basestring)):
model.setData(index, editor.text(), Qt.EditRole)
else:
model.setData(index, editor.value(), Qt.EditRole)
def sizeHint(self, option, index):
return QgsSpinBox().sizeHint()
def eventFilter(self, editor, event):
if event.type() == QEvent.FocusOut and hasattr(editor, 'canFocusOut'):
if not editor.canFocusOut:
return False
return QStyledItemDelegate.eventFilter(self, editor, event)
def convertValue(self, value):
if value is None or isinstance(value, QPyNullVariant):
return ""
try:
return int(value)
except:
try:
return float(value)
except:
return unicode(value)
class FileDirectorySelector(QWidget):
def __init__(self, parent=None, selectFile=False):
QWidget.__init__(self, parent)
# create gui
self.btnSelect = QToolButton()
self.btnSelect.setText(self.tr('...'))
self.lineEdit = QLineEdit()
self.hbl = QHBoxLayout()
self.hbl.setMargin(0)
self.hbl.setSpacing(0)
self.hbl.addWidget(self.lineEdit)
self.hbl.addWidget(self.btnSelect)
self.setLayout(self.hbl)
self.canFocusOut = False
self.selectFile = selectFile
self.setFocusPolicy(Qt.StrongFocus)
self.btnSelect.clicked.connect(self.select)
def select(self):
lastDir = ''
if not self.selectFile:
selectedPath = QFileDialog.getExistingDirectory(None,
self.tr('Select directory'), lastDir,
QFileDialog.ShowDirsOnly)
else:
selectedPath = QFileDialog.getOpenFileName(None,
self.tr('Select file'), lastDir, self.tr('All files (*.*)')
)
if not selectedPath:
return
self.lineEdit.setText(selectedPath)
self.canFocusOut = True
def text(self):
return self.lineEdit.text()
def setText(self, value):
self.lineEdit.setText(value)<|fim▁end|> | |
<|file_name|>day_5.rs<|end_file_name|><|fim▁begin|>use std::iter::Peekable;
use std::str::Chars;
pub fn evaluate(line: &str) -> f32 {
evaluate_iter(&mut line.chars().peekable())
}
fn evaluate_iter(iter: &mut Peekable<Chars>) -> f32 {
let mut accumulator = parse_term(iter.by_ref());
while iter.peek().is_some() {
let sign = iter.peek().cloned();
match sign {
Some('+') => { iter.next(); accumulator += parse_term(iter.by_ref()) },
Some('-') => { iter.next(); accumulator -= parse_term(iter.by_ref()) },
Some(_) | None => break,
}
}
accumulator
}
fn parse_term(iter: &mut Peekable<Chars>) -> f32 {
let mut accumulator = parse_arg(iter.by_ref());
while iter.peek().is_some() {
let sign = iter.peek().cloned();
match sign {
Some('×') => { iter.next(); accumulator *= parse_arg(iter.by_ref()) },
Some('÷') => { iter.next(); accumulator /= parse_arg(iter.by_ref()) },
Some(_) | None => break,
}
}
accumulator
}
fn parse_arg(iter: &mut Peekable<Chars>) -> f32 {
let mut has_point = false;
let mut accumulator = 0.0;
let mut exponent = 0.1;
while iter.peek().is_some() && (iter.peek().unwrap().is_digit(10) || *iter.peek().unwrap() == '.') {
let symbol = iter.next();
match symbol {
Some('.') => {
has_point = true;
continue
},
Some(d @ '0'...'9') => {
let v = d.to_digit(10).unwrap() as f32;
if has_point {
accumulator += v * exponent;
exponent *= 0.1;
}
else {
accumulator = accumulator*10.0 + v;
}
},
_ => break,
}
}
accumulator
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_evaluate_simple_number() {
assert_eq!(evaluate("1"), 1.0);
}
#[test]
fn test_evaluate_big_number() {
assert_eq!(evaluate("100"), 100.0);
}
#[test]
fn test_evaluate_real_number() {
assert_eq!(evaluate("1.09"), 1.09)
}
#[test]
fn test_evaluate_add() {
assert_eq!(evaluate("1.09+1.01"), 2.1);
}
#[test]
fn test_evaluate_sub() {
assert_eq!(evaluate("2-1"), 1.0);
}
#[test]
fn test_evaluate_mul() {
assert_eq!(evaluate("2×2"), 4.0);
}
#[test]
fn test_evaluate_div() {
assert_eq!(evaluate("22÷2"), 11.0);
}
#[test]
fn test_two_adds() {
assert_eq!(evaluate("2+3+6"), 11.0);
}
#[test]<|fim▁hole|>
#[test]
fn test_operation_with_different_priority() {
assert_eq!(evaluate("2+3×2"), 8.0);
}
}<|fim▁end|> | fn test_two_subs() {
assert_eq!(evaluate("6-4-1"), 1.0);
} |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# coding: utf-8
#
import re
import os
import time
import argparse
import yaml
import bunch
import uiautomator2 as u2
from logzero import logger
CLICK = "click"
# swipe
SWIPE_UP = "swipe_up"
SWIPE_RIGHT = "swipe_right"
SWIPE_LEFT = "swipe_left"
SWIPE_DOWN = "swipe_down"
SCREENSHOT = "screenshot"
EXIST = "assert_exist"
WAIT = "wait"
def split_step(text: str):
__alias = {
"点击": CLICK,
"上滑": SWIPE_UP,
"右滑": SWIPE_RIGHT,
"左滑": SWIPE_LEFT,
"下滑": SWIPE_DOWN,
"截图": SCREENSHOT,
"存在": EXIST,
"等待": WAIT,
}
for keyword in __alias.keys():
if text.startswith(keyword):
body = text[len(keyword):].strip()
return __alias.get(keyword, keyword), body
else:
raise RuntimeError("Step unable to parse", text)
def read_file_content(path: str, mode:str = "r") -> str:
with open(path, mode) as f:
return f.read()
def run_step(cf: bunch.Bunch, app: u2.Session, step: str):
logger.info("Step: %s", step)
oper, body = split_step(step)
logger.debug("parse as: %s %s", oper, body)
if oper == CLICK:
app.xpath(body).click()
elif oper == SWIPE_RIGHT:
app.xpath(body).swipe("right")
elif oper == SWIPE_UP:
app.xpath(body).swipe("up")
elif oper == SWIPE_LEFT:
app.xpath(body).swipe("left")
elif oper == SWIPE_DOWN:
app.xpath(body).swipe("down")
elif oper == SCREENSHOT:
output_dir = "./output"
filename = "screen-%d.jpg" % int(time.time()*1000)
if body:
filename = body
name_noext, ext = os.path.splitext(filename)
if ext.lower() not in ['.jpg', '.jpeg', '.png']:
ext = ".jpg"
os.makedirs(cf.output_directory, exist_ok=True)
filename = os.path.join(cf.output_directory, name_noext + ext)
logger.debug("Save screenshot: %s", filename)
app.screenshot().save(filename)
elif oper == EXIST:
assert app.xpath(body).wait(), body
elif oper == WAIT:
#if re.match("^[\d\.]+$")
if body.isdigit():
seconds = int(body)
logger.info("Sleep %d seconds", seconds)
time.sleep(seconds)
else:
app.xpath(body).wait()
else:
raise RuntimeError("Unhandled operation", oper)
def run_conf(d, conf_filename: str):
d.healthcheck()
d.xpath.when("允许").click()
d.xpath.watch_background(2.0)
cf = yaml.load(read_file_content(conf_filename), Loader=yaml.SafeLoader)
default = {
"output_directory": "output",
"action_before_delay": 0,
"action_after_delay": 0,
"skip_cleanup": False,
}
for k, v in default.items():
cf.setdefault(k, v)
cf = bunch.Bunch(cf)
print("Author:", cf.author)
print("Description:", cf.description)
print("Package:", cf.package)
logger.debug("action_delay: %.1f / %.1f", cf.action_before_delay, cf.action_after_delay)
app = d.session(cf.package)
for step in cf.steps:
time.sleep(cf.action_before_delay)
run_step(cf, app, step)
time.sleep(cf.action_after_delay)
if not cf.skip_cleanup:
app.close()
<|fim▁hole|>def test_entry():
pass
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--command", help="run single step command")
parser.add_argument("-s", "--serial", help="run single step command")
parser.add_argument("conf_filename", default="test.yml", nargs="?", help="config filename")
args = parser.parse_args()
d = u2.connect(args.serial)
if args.command:
cf = bunch.Bunch({"output_directory": "output"})
app = d.session()
run_step(cf, app, args.command)
else:
run_conf(d, args.conf_filename)<|fim▁end|> | device = None
conf_filename = None
|
<|file_name|>validation-parser.ts<|end_file_name|><|fim▁begin|>/**
* @file Validation Parser
* @author Alexander Rose <[email protected]>
* @private
*/
<|fim▁hole|>import { Debug, Log, ParserRegistry } from '../globals'
import XmlParser, { XmlParserParameters } from './xml-parser'
import Validation from '../structure/validation'
import Streamer from '../streamer/streamer';
class ValidationParser extends XmlParser {
constructor (streamer: Streamer, params?: Partial<XmlParserParameters>) {
const p = params || {}
super(streamer, p)
this.useDomParser = true
this.validation = new Validation(this.name, this.path)
}
get __objName () { return 'validation' }
get isXml () { return true }
_parse () {
super._parse()
if (Debug) Log.time('ValidationParser._parse ' + this.name)
this.validation.fromXml(this.xml.data)
if (Debug) Log.timeEnd('ValidationParser._parse ' + this.name)
}
}
ParserRegistry.add('validation', ValidationParser)
export default ValidationParser<|fim▁end|> | |
<|file_name|>notify.py<|end_file_name|><|fim▁begin|>"""Mycroft AI notification platform."""
import logging
from mycroftapi import MycroftAPI
from homeassistant.components.notify import BaseNotificationService
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config, discovery_info=None):
"""Get the Mycroft notification service."""
return MycroftNotificationService(hass.data["mycroft"])
class MycroftNotificationService(BaseNotificationService):
"""The Mycroft Notification Service."""
def __init__(self, mycroft_ip):
"""Initialize the service."""
self.mycroft_ip = mycroft_ip
def send_message(self, message="", **kwargs):<|fim▁hole|> mycroft = MycroftAPI(self.mycroft_ip)
if mycroft is not None:
mycroft.speak_text(text)
else:
_LOGGER.log("Could not reach this instance of mycroft")<|fim▁end|> | """Send a message mycroft to speak on instance."""
text = message |
<|file_name|>asr1k_routertype_driver.py<|end_file_name|><|fim▁begin|># Copyright 2015 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from sqlalchemy.sql import expression as expr
from neutron.db import models_v2
from neutron.extensions import l3
from neutron_lib import constants as l3_constants
from neutron_lib import exceptions as n_exc
from networking_cisco._i18n import _, _LW
from networking_cisco import backwards_compatibility as bc
from networking_cisco.plugins.cisco.common import cisco_constants
from networking_cisco.plugins.cisco.db.l3 import ha_db
from networking_cisco.plugins.cisco.db.l3 import l3_models
from networking_cisco.plugins.cisco.db.l3.l3_router_appliance_db import (
L3RouterApplianceDBMixin)
from networking_cisco.plugins.cisco.extensions import routerhostingdevice
from networking_cisco.plugins.cisco.extensions import routerrole
from networking_cisco.plugins.cisco.extensions import routertype
from networking_cisco.plugins.cisco.extensions import routertypeawarescheduler
from networking_cisco.plugins.cisco.l3 import drivers
LOG = logging.getLogger(__name__)
DEVICE_OWNER_GLOBAL_ROUTER_GW = cisco_constants.DEVICE_OWNER_GLOBAL_ROUTER_GW
HOSTING_DEVICE_ATTR = routerhostingdevice.HOSTING_DEVICE_ATTR
ROUTER_ROLE_GLOBAL = cisco_constants.ROUTER_ROLE_GLOBAL
ROUTER_ROLE_LOGICAL_GLOBAL = cisco_constants.ROUTER_ROLE_LOGICAL_GLOBAL
ROUTER_ROLE_HA_REDUNDANCY = cisco_constants.ROUTER_ROLE_HA_REDUNDANCY
TENANT_HSRP_GRP_RANGE = 1
TENANT_HSRP_GRP_OFFSET = 1064
EXT_HSRP_GRP_RANGE = 1
EXT_HSRP_GRP_OFFSET = 1064
N_ROUTER_PREFIX = 'nrouter-'
DEV_NAME_LEN = 14
class TopologyNotSupportedByRouterError(n_exc.Conflict):
message = _("Requested topology cannot be supported by router.")
class ASR1kL3RouterDriver(drivers.L3RouterBaseDriver):
def create_router_precommit(self, context, router_context):
pass
def create_router_postcommit(self, context, router_context):
pass
def update_router_precommit(self, context, router_context):
pass
def update_router_postcommit(self, context, router_context):
# Whenever a gateway is added to, or removed from, a router hosted on
# a hosting device, we must ensure that a global router is running
# (for add operation) or not running (for remove operation) on that
# hosting device.
current = router_context.current
if current[HOSTING_DEVICE_ATTR] is None:
return
e_context = context.elevated()
if current['gw_port_id']:
self._conditionally_add_global_router(e_context, current)
else:
self._conditionally_remove_global_router(
e_context, router_context.original, True)
def delete_router_precommit(self, context, router_context):
pass
def delete_router_postcommit(self, context, router_context):
pass
def schedule_router_precommit(self, context, router_context):
pass
def schedule_router_postcommit(self, context, router_context):
# When the hosting device hosts a Neutron router with external
# connectivity, a "global" router (modeled as a Neutron router) must
# also run on the hosting device (outside of any VRF) to enable the
# connectivity.
current = router_context.current
if current['gw_port_id'] and current[HOSTING_DEVICE_ATTR] is not None:
self._conditionally_add_global_router(context.elevated(), current)
def unschedule_router_precommit(self, context, router_context):
pass
def unschedule_router_postcommit(self, context, router_context):
# When there is no longer any router with external gateway hosted on
# a hosting device, the global router on that hosting device can also
# be removed.
current = router_context.current
hd_id = current[HOSTING_DEVICE_ATTR]
if current['gw_port_id'] and hd_id is not None:
self._conditionally_remove_global_router(context.elevated(),
current)
def add_router_interface_precommit(self, context, r_port_context):
# Inside an ASR1k, VLAN sub-interfaces are used to connect to internal
# neutron networks. Only one such sub-interface can be created for each
# VLAN. As the VLAN sub-interface is added to the VRF representing the
# Neutron router, we must only allow one Neutron router to attach to a
# particular Neutron subnet/network.
if (r_port_context.router_context.current[routerrole.ROUTER_ROLE_ATTR]
== ROUTER_ROLE_HA_REDUNDANCY):
# redundancy routers can be exempt as we check the user visible
# routers and the request will be rejected there.
return
e_context = context.elevated()
if r_port_context.current is None:
sn = self._core_plugin.get_subnet(e_context,
r_port_context.current_subnet_id)
net_id = sn['network_id']
else:
net_id = r_port_context.current['network_id']
filters = {'network_id': [net_id],
'device_owner': [bc.constants.DEVICE_OWNER_ROUTER_INTF]}
for port in self._core_plugin.get_ports(e_context,
filters=filters):
router_id = port['device_id']
if router_id is None:
continue
router = self._l3_plugin.get_router(e_context, router_id)
if router[routerrole.ROUTER_ROLE_ATTR] is None:
raise TopologyNotSupportedByRouterError()
def add_router_interface_postcommit(self, context, r_port_context):
pass
def remove_router_interface_precommit(self, context, r_port_context):
pass
def remove_router_interface_postcommit(self, context, r_port_context):
pass
def create_floatingip_precommit(self, context, fip_context):
pass
def create_floatingip_postcommit(self, context, fip_context):
pass
def update_floatingip_precommit(self, context, fip_context):
pass
def update_floatingip_postcommit(self, context, fip_context):
pass
def delete_floatingip_precommit(self, context, fip_context):
pass
def delete_floatingip_postcommit(self, context, fip_context):
pass
def ha_interface_ip_address_needed(self, context, router, port,
ha_settings_db, ha_group_uuid):
if port['device_owner'] == bc.constants.DEVICE_OWNER_ROUTER_GW:
return False
else:
return True
def generate_ha_group_id(self, context, router, port, ha_settings_db,
ha_group_uuid):
if port['device_owner'] in {bc.constants.DEVICE_OWNER_ROUTER_GW,
DEVICE_OWNER_GLOBAL_ROUTER_GW}:
ri_name = self._router_name(router['id'])[8:DEV_NAME_LEN]
group_id = int(ri_name, 16) % TENANT_HSRP_GRP_RANGE
group_id += TENANT_HSRP_GRP_OFFSET
return group_id
else:
net_id_digits = port['network_id'][:6]
group_id = int(net_id_digits, 16) % EXT_HSRP_GRP_RANGE
group_id += EXT_HSRP_GRP_OFFSET
return group_id
def pre_backlog_processing(self, context):
filters = {routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(context, filters=filters)
if not global_routers:
LOG.debug("There are no global routers")
return
for gr in global_routers:
filters = {
HOSTING_DEVICE_ATTR: [gr[HOSTING_DEVICE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_HA_REDUNDANCY, None]
}
invert_filters = {'gw_port_id': [None]}
num_rtrs = self._l3_plugin.get_routers_count_extended(
context, filters=filters, invert_filters=invert_filters)
LOG.debug("Global router %(name)s[%(id)s] with hosting_device "
"%(hd)s has %(num)d routers with gw_port set on that "
"device",
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR], 'num': num_rtrs, })
if num_rtrs == 0:
LOG.warning(
_LW("Global router:%(name)s[id:%(id)s] is present for "
"hosting device:%(hd)s but there are no tenant or "
"redundancy routers with gateway set on that hosting "
"device. Proceeding to delete global router."),
{'name': gr['name'], 'id': gr['id'],
'hd': gr[HOSTING_DEVICE_ATTR]})
self._delete_global_router(context, gr['id'])
filters = {
#TODO(bmelande): Filter on routertype of global router
#routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if log_global_routers:
log_global_router_id = log_global_routers[0]['id']
self._delete_global_router(context, log_global_router_id,
logical=True)
def post_backlog_processing(self, context):
pass
# ---------------- Create workflow functions -----------------
def _conditionally_add_global_router(self, context, tenant_router):
# We could filter on hosting device id but we don't so we get all
# global routers for this router type. We can then use that count to
# determine which ha priority a new global router should get.
filters = {
routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL]}
global_routers = self._l3_plugin.get_routers(
context, filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
hosting_device_id = tenant_router[HOSTING_DEVICE_ATTR]
ext_nw_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
global_router = hd_to_gr_dict.get(hosting_device_id)
logical_global_router = self._get_logical_global_router(context,
tenant_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, logical_global_router, ext_nw_id, tenant_router, True)
if global_router is None:
# must create global router on hosting device
global_router = self._create_global_router(
context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router)
self._conditionally_add_auxiliary_external_gateway_port(
context, global_router, ext_nw_id, tenant_router)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
for ni in self._l3_plugin.get_notifiers(context, [global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_add_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
provision_ha=False, port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
# tbe global router may or may not have an interface on the
# external network that the tenant router uses
filters = {
'device_id': [global_router['id']],
'device_owner': [port_type]}
connected_nets = {
p['network_id']: p['fixed_ips'] for p in
self._core_plugin.get_ports(context, filters=filters)}
if ext_net_id in connected_nets:
# already connected to the external network so we're done
return
else:
# not connected to the external network, so let's fix that
aux_gw_port = self._create_auxiliary_external_gateway_port(
context, global_router, ext_net_id, tenant_router, port_type)
if provision_ha:
self._provision_port_ha(context, aux_gw_port, global_router)
def _create_auxiliary_external_gateway_port(
self, context, global_router, ext_net_id, tenant_router,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
# When a global router is connected to an external network then a
# special type of gateway port is created on that network. Such a
# port is called auxiliary gateway ports. It has an ip address on
# each subnet of the external network. A (logical) global router
# never has a traditional Neutron gateway port.
filters = {
'device_id': [tenant_router['id']],
'device_owner': [l3_constants.DEVICE_OWNER_ROUTER_GW]}
# fetch the gateway port of the *tenant* router so we can determine
# the CIDR of that port's subnet
gw_port = self._core_plugin.get_ports(context,
filters=filters)[0]
fixed_ips = self._get_fixed_ips_subnets(context, gw_port)
global_router_id = global_router['id']
with context.session.begin(subtransactions=True):
aux_gw_port = self._core_plugin.create_port(context, {
'port': {
'tenant_id': '', # intentionally not set
'network_id': ext_net_id,
'mac_address': bc.constants.ATTR_NOT_SPECIFIED,
'fixed_ips': fixed_ips,
'device_id': global_router_id,
'device_owner': port_type,
'admin_state_up': True,
'name': ''}})
router_port = bc.RouterPort(
port_id=aux_gw_port['id'],
router_id=global_router_id,
port_type=port_type)
context.session.add(router_port)
return aux_gw_port
def _create_global_router(
self, context, hosting_device_id, hd_to_gr_dict, tenant_router,
logical_global_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name(hosting_device_id),
'admin_state_up': True}}
global_router, r_hd_b_db = self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR], False,
True, hosting_device_id, ROUTER_ROLE_GLOBAL)
# make the global router a redundancy router for the logical
# global router (which we treat as a hidden "user visible
# router" (how's that for a contradiction of terms! :-) )
with context.session.begin(subtransactions=True):
ha_priority = (
ha_db.DEFAULT_MASTER_PRIORITY -
len(hd_to_gr_dict) * ha_db.PRIORITY_INCREASE_STEP)
r_b_b = ha_db.RouterRedundancyBinding(
redundancy_router_id=global_router['id'],
priority=ha_priority,
user_router_id=logical_global_router['id'])
context.session.add(r_b_b)
return global_router
def _get_logical_global_router(self, context, tenant_router):
# Since HA is also enabled on the global routers on each hosting device
# those global routers need HA settings and VIPs. We represent that
# using a Neutron router that is never instantiated/hosted. That
# Neutron router is referred to as the "logical global" router.
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
logical_global_routers = self._l3_plugin.get_routers(
context, filters=filters)
if not logical_global_routers:
# must create logical global router<|fim▁hole|> logical_global_router = self._create_logical_global_router(
context, tenant_router)
else:
logical_global_router = logical_global_routers[0]
self._update_ha_redundancy_level(context, logical_global_router, 1)
return logical_global_router
def _create_logical_global_router(self, context, tenant_router):
r_spec = {'router': {
# global routers are not tied to any tenant
'tenant_id': '',
'name': self._global_router_name('', logical=True),
'admin_state_up': True,
# set auto-schedule to false to keep this router un-hosted
routertypeawarescheduler.AUTO_SCHEDULE_ATTR: False}}
# notifications should never be sent for this logical router!
logical_global_router, r_hd_b_db = (
self._l3_plugin.do_create_router(
context, r_spec, tenant_router[routertype.TYPE_ATTR],
False, True, None, ROUTER_ROLE_LOGICAL_GLOBAL))
with context.session.begin(subtransactions=True):
r_ha_s_db = ha_db.RouterHASetting(
router_id=logical_global_router['id'],
ha_type=cfg.CONF.ha.default_ha_mechanism,
redundancy_level=1,
priority=ha_db.DEFAULT_MASTER_PRIORITY,
probe_connectivity=False,
probe_target=None,
probe_interval=None)
context.session.add(r_ha_s_db)
return logical_global_router
def _get_fixed_ips_subnets(self, context, gw_port):
nw = self._core_plugin.get_network(context, gw_port['network_id'])
subnets = [{'subnet_id': s} for s in nw['subnets']]
return subnets
def _provision_port_ha(self, context, ha_port, router, ha_binding_db=None):
ha_group_uuid = uuidutils.generate_uuid()
router_id = router['id']
with context.session.begin(subtransactions=True):
if ha_binding_db is None:
ha_binding_db = self._get_ha_binding(context, router_id)
group_id = self.generate_ha_group_id(
context, router,
{'device_owner': DEVICE_OWNER_GLOBAL_ROUTER_GW}, ha_binding_db,
ha_group_uuid)
r_ha_g = ha_db.RouterHAGroup(
id=ha_group_uuid,
tenant_id='',
ha_type=ha_binding_db.ha_type,
group_identity=group_id,
ha_port_id=ha_port['id'],
extra_port_id=None,
subnet_id=ha_port['fixed_ips'][0]['subnet_id'],
user_router_id=router_id,
timers_config='',
tracking_config='',
other_config='')
context.session.add(r_ha_g)
def _get_ha_binding(self, context, router_id):
with context.session.begin(subtransactions=True):
query = context.session.query(ha_db.RouterHASetting)
query = query.filter(
ha_db.RouterHASetting.router_id == router_id)
return query.first()
# ---------------- Remove workflow functions -----------------
def _conditionally_remove_global_router(self, context, tenant_router,
update_operation=False):
filters = {routertype.TYPE_ATTR: [tenant_router[routertype.TYPE_ATTR]],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_GLOBAL],
HOSTING_DEVICE_ATTR: [tenant_router[HOSTING_DEVICE_ATTR]]}
global_routers = self._l3_plugin.get_routers(context,
filters=filters)
hd_to_gr_dict = {r[HOSTING_DEVICE_ATTR]: r for r in global_routers}
if global_routers:
global_router_id = global_routers[0]['id']
if not tenant_router or not tenant_router[l3.EXTERNAL_GW_INFO]:
# let l3 plugin's periodic backlog processing take care of the
# clean up of the global router
return
ext_net_id = tenant_router[l3.EXTERNAL_GW_INFO]['network_id']
routertype_id = tenant_router[routertype.TYPE_ATTR]
hd_id = tenant_router[HOSTING_DEVICE_ATTR]
global_router = hd_to_gr_dict.get(hd_id)
port_deleted = self._conditionally_remove_auxiliary_gateway_port(
context, global_router_id, ext_net_id, routertype_id, hd_id,
update_operation)
if port_deleted is False:
# since no auxiliary gateway port was deleted we can
# abort no since auxiliary gateway port count cannot
# have reached zero
return
filters = {
'device_id': [global_router_id],
'device_owner': [DEVICE_OWNER_GLOBAL_ROUTER_GW]}
num_aux_gw_ports = self._core_plugin.get_ports_count(
context, filters=filters)
if num_aux_gw_ports == 0:
# global router not needed any more so we delete it
self._delete_global_router(context, global_router_id)
do_notify = False
else:
do_notify = True
# process logical global router to remove its port
self._conditionally_remove_auxiliary_gateway_vip_port(
context, ext_net_id, routertype_id)
self._l3_plugin.add_type_and_hosting_device_info(context,
global_router)
if do_notify is True:
for ni in self._l3_plugin.get_notifiers(context,
[global_router]):
if ni['notifier']:
ni['notifier'].routers_updated(context, ni['routers'])
def _conditionally_remove_auxiliary_gateway_port(
self, context, router_id, ext_net_id, routertype_id,
hosting_device_id, update_operation=False):
num_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, None, hosting_device_id)
if ((num_rtrs <= 1 and update_operation is False) or
(num_rtrs == 0 and update_operation is True)):
# there are no tenant routers *on ext_net_id* that are serviced by
# this global router so it's aux gw port can be deleted
self._delete_auxiliary_gateway_ports(context, router_id,
ext_net_id)
return True
return False
def _conditionally_remove_auxiliary_gateway_vip_port(
self, context, ext_net_id, routertype_id):
filters = {routertype.TYPE_ATTR: [routertype_id],
routerrole.ROUTER_ROLE_ATTR: [ROUTER_ROLE_LOGICAL_GLOBAL]}
log_global_routers = self._l3_plugin.get_routers(context,
filters=filters)
if not log_global_routers:
return
self._update_ha_redundancy_level(context, log_global_routers[0], -1)
log_global_router_id = log_global_routers[0]['id']
num_global_rtrs = self._get_gateway_routers_count(
context, ext_net_id, routertype_id, ROUTER_ROLE_GLOBAL)
if num_global_rtrs == 0:
# there are no global routers *on ext_net_id* that are serviced by
# this logical global router so it's aux gw VIP port can be deleted
self._delete_auxiliary_gateway_ports(context, log_global_router_id,
ext_net_id)
filters[routerrole.ROUTER_ROLE_ATTR] = [ROUTER_ROLE_GLOBAL]
total_num_global_rtrs = self._l3_plugin.get_routers_count(
context, filters=filters)
if total_num_global_rtrs == 0:
# there are no global routers left that are serviced by this
# logical global router so it can be deleted
self._delete_global_router(context, log_global_router_id, True)
return False
def _delete_auxiliary_gateway_ports(
self, context, router_id, net_id=None,
port_type=DEVICE_OWNER_GLOBAL_ROUTER_GW):
filters = {
'device_id': [router_id],
'device_owner': [port_type]}
if net_id is not None:
filters['network_id'] = [net_id]
for port in self._core_plugin.get_ports(context, filters=filters):
try:
self._core_plugin.delete_port(context, port['id'],
l3_port_check=False)
except (exc.ObjectDeletedError, n_exc.PortNotFound) as e:
LOG.warning(e)
def _delete_global_router(self, context, global_router_id, logical=False):
# ensure we clean up any stale auxiliary gateway ports
self._delete_auxiliary_gateway_ports(context, global_router_id)
try:
if logical is True:
# We use parent class method as no special operations beyond
# what the base implemenation does are needed for logical
# global router
super(L3RouterApplianceDBMixin, self._l3_plugin).delete_router(
context, global_router_id)
else:
self._l3_plugin.delete_router(
context, global_router_id, unschedule=False)
except (exc.ObjectDeletedError, l3.RouterNotFound) as e:
LOG.warning(e)
def _get_gateway_routers_count(self, context, ext_net_id, routertype_id,
router_role, hosting_device_id=None):
# Determine number of routers (with routertype_id and router_role)
# that act as gateway to ext_net_id and that are hosted on
# hosting_device_id (if specified).
query = context.session.query(bc.Router)
if router_role in [None, ROUTER_ROLE_HA_REDUNDANCY]:
# tenant router roles
query = query.join(models_v2.Port,
models_v2.Port.id == bc.Router.gw_port_id)
role_filter = expr.or_(
l3_models.RouterHostingDeviceBinding.role == expr.null(),
l3_models.RouterHostingDeviceBinding.role ==
ROUTER_ROLE_HA_REDUNDANCY)
else:
# global and logical global routers
query = query.join(models_v2.Port,
models_v2.Port.device_owner == bc.Router.id)
role_filter = (
l3_models.RouterHostingDeviceBinding.role == router_role)
query = query.join(
l3_models.RouterHostingDeviceBinding,
l3_models.RouterHostingDeviceBinding.router_id == bc.Router.id)
query = query.filter(
role_filter,
models_v2.Port.network_id == ext_net_id,
l3_models.RouterHostingDeviceBinding.router_type_id ==
routertype_id)
if hosting_device_id is not None:
query = query.filter(
l3_models.RouterHostingDeviceBinding.hosting_device_id ==
hosting_device_id)
return query.count()
# ---------------- General support functions -----------------
def _update_ha_redundancy_level(self, context, logical_global_router,
delta):
with context.session.begin(subtransactions=True):
log_g_router_db = self._l3_plugin._get_router(
context, logical_global_router['id'])
log_g_router_db.ha_settings.redundancy_level += delta
context.session.add(log_g_router_db.ha_settings)
def _router_name(self, router_id):
return N_ROUTER_PREFIX + router_id
def _global_router_name(self, hosting_device_id, logical=False):
if logical is True:
return cisco_constants.LOGICAL_ROUTER_ROLE_NAME
else:
return '%s-%s' % (cisco_constants.ROUTER_ROLE_NAME_PREFIX,
hosting_device_id[-cisco_constants.ROLE_ID_LEN:])
@property
def _core_plugin(self):
return bc.get_plugin()
@property
def _l3_plugin(self):
return bc.get_plugin(bc.constants.L3)<|fim▁end|> | |
<|file_name|>constellation.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use pipeline::{Pipeline, CompositionPipeline};
use compositor_task::CompositorProxy;
use compositor_task::Msg as CompositorMsg;
use devtools_traits::{DevtoolsControlChan, DevtoolsControlMsg};
use geom::rect::{Rect, TypedRect};
use geom::scale_factor::ScaleFactor;
use gfx::font_cache_task::FontCacheTask;
use layers::geometry::DevicePixel;
use layout_traits::LayoutTaskFactory;
use libc;
use script_traits::{CompositorEvent, ConstellationControlMsg};
use script_traits::{ScriptControlChan, ScriptTaskFactory};
use msg::compositor_msg::LayerId;
use msg::constellation_msg::{self, ConstellationChan, Failure};
use msg::constellation_msg::{IFrameSandboxState, NavigationDirection};
use msg::constellation_msg::{Key, KeyState, KeyModifiers};
use msg::constellation_msg::{LoadData, NavigationType};
use msg::constellation_msg::{PipelineExitType, PipelineId};
use msg::constellation_msg::{SubpageId, WindowSizeData};
use msg::constellation_msg::Msg as ConstellationMsg;
use net::image_cache_task::{ImageCacheTask, ImageCacheTaskClient};
use net::resource_task::ResourceTask;
use net::resource_task;
use net::storage_task::{StorageTask, StorageTaskMsg};
use util::cursor::Cursor;
use util::geometry::{PagePx, ViewportPx};
use util::opts;
use util::task::spawn_named;
use util::time::TimeProfilerChan;
use std::borrow::ToOwned;
use std::cell::{Cell, RefCell};
use std::collections::{HashMap, HashSet};
use std::old_io as io;
use std::mem::replace;
use std::rc::Rc;
use std::sync::mpsc::{Receiver, channel};
use url::Url;
/// Maintains the pipelines and navigation context and grants permission to composite.
pub struct Constellation<LTF, STF> {
/// A channel through which messages can be sent to this object.
pub chan: ConstellationChan,
/// Receives messages.
pub request_port: Receiver<ConstellationMsg>,
/// A channel (the implementation of which is port-specific) through which messages can be sent
/// to the compositor.
pub compositor_proxy: Box<CompositorProxy>,
/// A channel through which messages can be sent to the resource task.
pub resource_task: ResourceTask,
/// A channel through which messages can be sent to the image cache task.
pub image_cache_task: ImageCacheTask,
/// A channel through which messages can be sent to the developer tools.
devtools_chan: Option<DevtoolsControlChan>,
/// A channel through which messages can be sent to the storage task.
storage_task: StorageTask,
/// A list of all the pipelines. (See the `pipeline` module for more details.)
pipelines: HashMap<PipelineId, Rc<Pipeline>>,
/// A channel through which messages can be sent to the font cache.
font_cache_task: FontCacheTask,
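/// The browsing history: the currently displayed frame tree plus the frame
/// trees reachable via back and forward navigation.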
navigation_context: NavigationContext,
/// The next free ID to assign to a pipeline.
next_pipeline_id: PipelineId,
/// The next free ID to assign to a frame.
next_frame_id: FrameId,
/// Navigation operations that are in progress.
pending_frames: Vec<FrameChange>,
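/// Rects assigned to subframes whose pipelines had not yet been created
/// when the size arrived, keyed by (parent pipeline, subpage).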
pending_sizes: HashMap<(PipelineId, SubpageId), TypedRect<PagePx, f32>>,
/// A channel through which messages can be sent to the time profiler.
pub time_profiler_chan: TimeProfilerChan,
pub window_size: WindowSizeData,
}
/// A unique ID used to identify a frame.
#[derive(Copy)]
pub struct FrameId(u32);
/// One frame in the hierarchy.
struct FrameTree {
/// The ID of this frame.
pub id: FrameId,
/// The pipeline for this frame.
pub pipeline: RefCell<Rc<Pipeline>>,
/// The parent frame's pipeline.
pub parent: RefCell<Option<Rc<Pipeline>>>,
/// A vector of child frames.
pub children: RefCell<Vec<ChildFrameTree>>,
/// Whether this frame has a compositor layer.
pub has_compositor_layer: Cell<bool>,
}
impl FrameTree {
fn new(id: FrameId, pipeline: Rc<Pipeline>, parent_pipeline: Option<Rc<Pipeline>>)
-> FrameTree {
FrameTree {
id: id,
pipeline: RefCell::new(pipeline.clone()),
parent: RefCell::new(parent_pipeline),
children: RefCell::new(vec!()),
has_compositor_layer: Cell::new(false),
}
}
fn add_child(&self, new_child: ChildFrameTree) {
self.children.borrow_mut().push(new_child);
}
}
#[derive(Clone)]
struct ChildFrameTree {
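/// The child's frame tree.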
frame_tree: Rc<FrameTree>,
/// Clipping rect representing the size and position, in page coordinates, of the visible
/// region of the child frame relative to the parent.
pub rect: Option<TypedRect<PagePx, f32>>,
}
impl ChildFrameTree {
fn new(frame_tree: Rc<FrameTree>, rect: Option<TypedRect<PagePx, f32>>) -> ChildFrameTree {
ChildFrameTree {
frame_tree: frame_tree,
rect: rect,
}
}
}
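/// A version of a frame tree that can be sent across task boundaries, e.g.
/// to the compositor.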
pub struct SendableFrameTree {
pub pipeline: CompositionPipeline,
pub children: Vec<SendableChildFrameTree>,
}
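/// A sendable version of `ChildFrameTree`.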
pub struct SendableChildFrameTree {
pub frame_tree: SendableFrameTree,
pub rect: Option<TypedRect<PagePx, f32>>,
}
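/// The result of `replace_child`: either the node that was replaced, or the
/// original replacement node if no node with the given ID was found.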
enum ReplaceResult {
ReplacedNode(Rc<FrameTree>),
OriginalNode(Rc<FrameTree>),
}
impl FrameTree {
fn to_sendable(&self) -> SendableFrameTree {
SendableFrameTree {
pipeline: self.pipeline.borrow().to_sendable(),
children: self.children
.borrow()
.iter()
.map(|frame_tree| frame_tree.to_sendable())
.collect(),
}
}
}
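/// Traversal and search operations over a frame tree.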
trait FrameTreeTraversal {
fn contains(&self, id: PipelineId) -> bool;
fn find(&self, id: PipelineId) -> Option<Self>;
fn find_with_subpage_id(&self, id: Option<SubpageId>) -> Option<Rc<FrameTree>>;
fn replace_child(&self, id: PipelineId, new_child: Self) -> ReplaceResult;
fn iter(&self) -> FrameTreeIterator;
}
impl FrameTreeTraversal for Rc<FrameTree> {
fn contains(&self, id: PipelineId) -> bool {
self.iter().any(|frame_tree| id == frame_tree.pipeline.borrow().id)
}
/// Returns the frame tree whose key is id
fn find(&self, id: PipelineId) -> Option<Rc<FrameTree>> {
self.iter().find(|frame_tree| id == frame_tree.pipeline.borrow().id)
}
/// Returns the frame tree whose subpage is id
fn find_with_subpage_id(&self, id: Option<SubpageId>) -> Option<Rc<FrameTree>> {
self.iter().find(|frame_tree| id == frame_tree.pipeline.borrow().subpage_id())
}
/// Replaces a node of the frame tree in place. Returns the node that was removed or the
/// original node if the node to replace could not be found.
fn replace_child(&self, id: PipelineId, new_child: Rc<FrameTree>) -> ReplaceResult {
for frame_tree in self.iter() {
let mut children = frame_tree.children.borrow_mut();
let child = children.iter_mut()
.find(|child| child.frame_tree.pipeline.borrow().id == id);
match child {
Some(child) => {
*new_child.parent.borrow_mut() = child.frame_tree.parent.borrow().clone();
return ReplaceResult::ReplacedNode(replace(&mut child.frame_tree, new_child));
}
None => (),
}
}
ReplaceResult::OriginalNode(new_child)
}
fn iter(&self) -> FrameTreeIterator {
FrameTreeIterator {
stack: vec!(self.clone()),
}
}
}
impl ChildFrameTree {
fn to_sendable(&self) -> SendableChildFrameTree {
SendableChildFrameTree {
frame_tree: self.frame_tree.to_sendable(),
rect: self.rect,
}
}
}
/// An iterator over a frame tree, returning nodes in depth-first order.
/// Note that this iterator should _not_ be used to mutate nodes _during_
/// iteration. Mutating nodes once the iterator is out of scope is OK.
struct FrameTreeIterator {
stack: Vec<Rc<FrameTree>>,
}
impl Iterator for FrameTreeIterator {
type Item = Rc<FrameTree>;
fn next(&mut self) -> Option<Rc<FrameTree>> {
match self.stack.pop() {
Some(next) => {
for cft in next.children.borrow().iter() {
self.stack.push(cft.frame_tree.clone());
}
Some(next)
}
None => None,
}
}
}
/// Represents the portion of a page that is changing in navigating.
struct FrameChange {
/// The old pipeline ID.
pub before: Option<PipelineId>,
/// The resulting frame tree after navigation.
pub after: Rc<FrameTree>,
/// The kind of navigation that is occurring.
pub navigation_type: NavigationType,
}
/// Stores the frame trees for the previous, current, and next pages in the
/// browser's session history.
struct NavigationContext {
previous: Vec<Rc<FrameTree>>,
next: Vec<Rc<FrameTree>>,
current: Option<Rc<FrameTree>>,
}
impl NavigationContext {
fn new() -> NavigationContext {
NavigationContext {
previous: vec!(),
next: vec!(),
current: None,
}
}
/* Note that the following two methods can fail. They should only be called *
* when it is known that there exists either a previous page or a next page. */
fn back(&mut self, compositor_proxy: &mut CompositorProxy) -> Rc<FrameTree> {
self.next.push(self.current.take().unwrap());
let prev = self.previous.pop().unwrap();
self.set_current(prev.clone(), compositor_proxy);
prev
}
fn forward(&mut self, compositor_proxy: &mut CompositorProxy) -> Rc<FrameTree> {
self.previous.push(self.current.take().unwrap());
let next = self.next.pop().unwrap();
self.set_current(next.clone(), compositor_proxy);
next
}
/// Loads a new set of page frames, returning all evicted frame trees
fn load(&mut self, frame_tree: Rc<FrameTree>, compositor_proxy: &mut CompositorProxy)
-> Vec<Rc<FrameTree>> {
debug!("navigating to {:?}", frame_tree.pipeline.borrow().id);
let evicted = replace(&mut self.next, vec!());
match self.current.take() {
Some(current) => self.previous.push(current),
None => (),
}
self.set_current(frame_tree, compositor_proxy);
evicted
}
/// Returns the frame trees whose keys are pipeline_id.
fn find_all(&mut self, pipeline_id: PipelineId) -> Vec<Rc<FrameTree>> {
let from_current = self.current.iter().filter_map(|frame_tree| {
frame_tree.find(pipeline_id)
});
let from_next = self.next.iter().filter_map(|frame_tree| {
frame_tree.find(pipeline_id)
});
let from_prev = self.previous.iter().filter_map(|frame_tree| {
frame_tree.find(pipeline_id)
});
from_prev.chain(from_current).chain(from_next).collect()
}
fn contains(&mut self, pipeline_id: PipelineId) -> bool {
let from_current = self.current.iter();
let from_next = self.next.iter();
let from_prev = self.previous.iter();
let mut all_contained = from_prev.chain(from_current).chain(from_next);
all_contained.any(|frame_tree| {
frame_tree.contains(pipeline_id)
})
}
/// Always use this method to set the currently-displayed frame. It correctly informs the
/// compositor of the new URLs.
fn set_current(&mut self, new_frame: Rc<FrameTree>, compositor_proxy: &mut CompositorProxy) {
self.current = Some(new_frame.clone());
compositor_proxy.send(CompositorMsg::ChangePageLoadData(
new_frame.id,
new_frame.pipeline.borrow().load_data.clone()));
}
}
impl<LTF: LayoutTaskFactory, STF: ScriptTaskFactory> Constellation<LTF, STF> {
pub fn start(compositor_proxy: Box<CompositorProxy+Send>,
resource_task: ResourceTask,
image_cache_task: ImageCacheTask,
font_cache_task: FontCacheTask,
time_profiler_chan: TimeProfilerChan,
devtools_chan: Option<DevtoolsControlChan>,
storage_task: StorageTask)
-> ConstellationChan {
let (constellation_port, constellation_chan) = ConstellationChan::new();
let constellation_chan_clone = constellation_chan.clone();
spawn_named("Constellation".to_owned(), move || {
let mut constellation: Constellation<LTF, STF> = Constellation {
chan: constellation_chan_clone,
request_port: constellation_port,
compositor_proxy: compositor_proxy,
devtools_chan: devtools_chan,
resource_task: resource_task,
image_cache_task: image_cache_task,
font_cache_task: font_cache_task,
storage_task: storage_task,
pipelines: HashMap::new(),
navigation_context: NavigationContext::new(),
next_pipeline_id: PipelineId(0),
next_frame_id: FrameId(0),
pending_frames: vec!(),
pending_sizes: HashMap::new(),
time_profiler_chan: time_profiler_chan,
window_size: WindowSizeData {
visible_viewport: opts::get().initial_window_size.as_f32() * ScaleFactor(1.0),
initial_viewport: opts::get().initial_window_size.as_f32() * ScaleFactor(1.0),
device_pixel_ratio: ScaleFactor(1.0),
},
};
constellation.run();
});
constellation_chan
}
fn run(&mut self) {
loop {
let request = self.request_port.recv().unwrap();
if !self.handle_request(request) {
break;
}
}
}
/// Helper function for creating a pipeline
fn new_pipeline(&mut self,
id: PipelineId,
parent: Option<(PipelineId, SubpageId)>,
script_pipeline: Option<Rc<Pipeline>>,
load_data: LoadData)
-> Rc<Pipeline> {
let pipe = Pipeline::create::<LTF, STF>(id,
parent,
self.chan.clone(),
self.compositor_proxy.clone_compositor_proxy(),
self.devtools_chan.clone(),
self.image_cache_task.clone(),
self.font_cache_task.clone(),
self.resource_task.clone(),
self.storage_task.clone(),
self.time_profiler_chan.clone(),
self.window_size,
script_pipeline,
load_data.clone());
pipe.load();
Rc::new(pipe)
}
/// Helper function for getting a unique pipeline ID.
fn get_next_pipeline_id(&mut self) -> PipelineId {
let id = self.next_pipeline_id;
let PipelineId(ref mut i) = self.next_pipeline_id;
*i += 1;
id
}
/// Helper function for getting a unique frame ID.
fn get_next_frame_id(&mut self) -> FrameId {
let id = self.next_frame_id;
let FrameId(ref mut i) = self.next_frame_id;
*i += 1;
id
}
/// Convenience function for getting the currently active frame tree.
/// The currently active frame tree should always be the current painter
fn current_frame<'a>(&'a self) -> &'a Option<Rc<FrameTree>> {
&self.navigation_context.current
}
/// Returns both the navigation context and pending frame trees whose keys are pipeline_id.
fn find_all(&mut self, pipeline_id: PipelineId) -> Vec<Rc<FrameTree>> {
let mut matching_navi_frames = self.navigation_context.find_all(pipeline_id);
matching_navi_frames.extend(self.pending_frames.iter().filter_map(|frame_change| {
frame_change.after.find(pipeline_id)
}));
matching_navi_frames
}
/// Handles loading pages, navigation, and granting access to the compositor
fn handle_request(&mut self, request: ConstellationMsg) -> bool {
match request {
ConstellationMsg::Exit => {
debug!("constellation exiting");
self.handle_exit();
return false;
}
ConstellationMsg::Failure(Failure { pipeline_id, parent }) => {
self.handle_failure_msg(pipeline_id, parent);
}
// This should only be called once per constellation, and only by the browser
ConstellationMsg::InitLoadUrl(url) => {
debug!("constellation got init load URL message");
self.handle_init_load(url);
}
// A layout assigned a size and position to a subframe. This needs to be reflected by
// all frame trees in the navigation context containing the subframe.
ConstellationMsg::FrameRect(pipeline_id, subpage_id, rect) => {
debug!("constellation got frame rect message");
self.handle_frame_rect_msg(pipeline_id, subpage_id, Rect::from_untyped(&rect));
}
ConstellationMsg::ScriptLoadedURLInIFrame(url, source_pipeline_id, new_subpage_id, old_subpage_id, sandbox) => {
debug!("constellation got iframe URL load message");
self.handle_script_loaded_url_in_iframe_msg(url,
source_pipeline_id,
new_subpage_id,
old_subpage_id,
sandbox);
}
ConstellationMsg::SetCursor(cursor) => self.handle_set_cursor_msg(cursor),
// Load a new page, usually -- but not always -- from a mouse click or typed url
// If there is already a pending page (self.pending_frames), it will not be overridden;
// however, if the id is not encompassed by another change, it will be.
ConstellationMsg::LoadUrl(source_id, load_data) => {
debug!("constellation got URL load message");
self.handle_load_url_msg(source_id, load_data);
}
// A page loaded through one of several methods above has completed all parsing,
// script, and reflow messages have been sent.
ConstellationMsg::LoadComplete => {
debug!("constellation got load complete message");
self.compositor_proxy.send(CompositorMsg::LoadComplete);
}
// Handle a forward or back request
ConstellationMsg::Navigate(direction) => {
debug!("constellation got navigation message");
self.handle_navigate_msg(direction);
}
// Notification that painting has finished and is requesting permission to paint.
ConstellationMsg::PainterReady(pipeline_id) => {
debug!("constellation got painter ready message");
self.handle_painter_ready_msg(pipeline_id);
}
ConstellationMsg::ResizedWindow(new_size) => {
debug!("constellation got window resize message");
self.handle_resized_window_msg(new_size);
}
ConstellationMsg::KeyEvent(key, state, modifiers) => {
debug!("constellation got key event message");
self.handle_key_msg(key, state, modifiers);
}
ConstellationMsg::GetPipelineTitle(pipeline_id) => {
debug!("constellation got get-pipeline-title message");
self.handle_get_pipeline_title_msg(pipeline_id);
}
}
true
}
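/// Shuts down all pipelines and helper tasks, then tells the compositor
/// that shutdown is complete.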
fn handle_exit(&mut self) {
for (_id, ref pipeline) in self.pipelines.iter() {
pipeline.exit(PipelineExitType::Complete);
}
self.image_cache_task.exit();
self.resource_task.send(resource_task::ControlMsg::Exit).unwrap();
self.devtools_chan.as_ref().map(|chan| {
chan.send(DevtoolsControlMsg::ServerExitMsg).unwrap();
});
self.storage_task.send(StorageTaskMsg::Exit).unwrap();
self.font_cache_task.exit();
self.compositor_proxy.send(CompositorMsg::ShutdownComplete);
}
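/// Handles the failure of a pipeline: tears down the failed pipeline (and
/// any pending frame changes that reference it) and replaces it with a
/// pipeline displaying about:failure.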
fn handle_failure_msg(&mut self, pipeline_id: PipelineId, parent: Option<(PipelineId, SubpageId)>) {
debug!("handling failure message from pipeline {:?}, {:?}", pipeline_id, parent);
if opts::get().hard_fail {
// It's quite difficult to make Servo exit cleanly if some tasks have failed.
// Hard fail exists for test runners so we crash and that's good enough.
let mut stderr = io::stderr();
stderr.write_str("Pipeline failed in hard-fail mode. Crashing!\n").unwrap();
stderr.flush().unwrap();
unsafe { libc::exit(1); }
}
let old_pipeline = match self.pipelines.get(&pipeline_id) {
None => {
debug!("no existing pipeline found; bailing out of failure recovery.");
return; // already failed?
}
Some(pipeline) => pipeline.clone()
};
old_pipeline.force_exit();
self.compositor_proxy.send(CompositorMsg::PaintTaskExited(old_pipeline.id));
self.pipelines.remove(&pipeline_id);
loop {
let idx = self.pending_frames.iter().position(|pending| {
pending.after.pipeline.borrow().id == pipeline_id
});
match idx {
Some(idx) => {
debug!("removing pending frame change for failed pipeline");
let pending_pipeline = self.pending_frames[idx].after.pipeline.borrow().clone();
pending_pipeline.force_exit();
// Notify the compositor about the pending pipeline that just
// exited, not the already-handled old pipeline.
self.compositor_proxy.send(CompositorMsg::PaintTaskExited(pending_pipeline.id));
self.pending_frames.remove(idx);
},
None => break,
}
}
debug!("creating replacement pipeline for about:failure");
let new_id = self.get_next_pipeline_id();
let new_frame_id = self.get_next_frame_id();
let pipeline = self.new_pipeline(new_id, parent, None,
LoadData::new(Url::parse("about:failure").unwrap()));
self.browse(Some(pipeline_id),
Rc::new(FrameTree::new(new_frame_id, pipeline.clone(), None)),
NavigationType::Load);
self.pipelines.insert(new_id, pipeline);
}
/// Performs navigation. This pushes a `FrameChange` object onto the list of pending frames.
///
/// TODO(pcwalton): Send a `BeforeBrowse` message to the embedder and allow cancellation.
fn browse(&mut self,
before: Option<PipelineId>,
after: Rc<FrameTree>,
navigation_type: NavigationType) {
self.pending_frames.push(FrameChange {
before: before,
after: after,
navigation_type: navigation_type,
});
}
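/// Creates the initial pipeline and frame tree for the URL the browser was
/// started with.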
fn handle_init_load(&mut self, url: Url) {
let next_pipeline_id = self.get_next_pipeline_id();
let next_frame_id = self.get_next_frame_id();
let pipeline = self.new_pipeline(next_pipeline_id, None, None, LoadData::new(url));
self.browse(None,
Rc::new(FrameTree::new(next_frame_id, pipeline.clone(), None)),
NavigationType::Load);
self.pipelines.insert(pipeline.id, pipeline);
}
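/// Records the size and position assigned to a subframe by layout and
/// propagates it to every frame tree that contains the subframe.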
fn handle_frame_rect_msg(&mut self, pipeline_id: PipelineId, subpage_id: SubpageId,
rect: TypedRect<PagePx, f32>) {
debug!("Received frame rect {:?} from {:?}, {:?}", rect, pipeline_id, subpage_id);
let mut already_sent = HashSet::new();
// Returns true if a child frame tree's subpage id matches the given subpage id
let subpage_eq = |&:child_frame_tree: & &mut ChildFrameTree| {
child_frame_tree.frame_tree.pipeline.borrow().
subpage_id().expect("Constellation:
child frame does not have a subpage id. This should not be possible.")
== subpage_id
};
let frames = self.find_all(pipeline_id);
{
// If the subframe is in the current frame tree, the compositor needs the new size
for current_frame in self.navigation_context.current.iter() {
debug!("Constellation: Sending size for frame in current frame tree.");
let source_frame = current_frame.find(pipeline_id);
for source_frame in source_frame.iter() {
let mut children = source_frame.children.borrow_mut();
match children.iter_mut().find(|child| subpage_eq(child)) {
None => {}
Some(child) => {
let has_compositor_layer = child.frame_tree.has_compositor_layer.get();
update_child_rect(child,
rect,
has_compositor_layer,
&mut already_sent,
&mut self.compositor_proxy,
self.window_size.device_pixel_ratio)
}
}
}
}
// Update all frames with matching pipeline- and subpage-ids
for frame_tree in frames.iter() {
let mut children = frame_tree.children.borrow_mut();
let found_child = children.iter_mut().find(|child| subpage_eq(child));
found_child.map(|child| {
update_child_rect(child,
rect,
false,
&mut already_sent,
&mut self.compositor_proxy,
self.window_size.device_pixel_ratio)
});
}
}
// At this point, if no pipelines were sent a resize msg, then this subpage id
// should be added to pending sizes
if already_sent.is_empty() {
self.pending_sizes.insert((pipeline_id, subpage_id), rect);
}
// Update a child's frame rect and inform its script task of the change,
// if it hasn't been already. Optionally inform the compositor if
// resize happens immediately.
fn update_child_rect(child_frame_tree: &mut ChildFrameTree,
rect: TypedRect<PagePx,f32>,
is_active: bool,
already_sent: &mut HashSet<PipelineId>,
compositor_proxy: &mut Box<CompositorProxy>,
device_pixel_ratio: ScaleFactor<ViewportPx,DevicePixel,f32>) {
child_frame_tree.rect = Some(rect);
// NOTE: work around borrow checker issues
let pipeline = &*child_frame_tree.frame_tree.pipeline.borrow();
if !already_sent.contains(&pipeline.id) {
if is_active {
let ScriptControlChan(ref script_chan) = pipeline.script_chan;
script_chan.send(ConstellationControlMsg::Resize(pipeline.id, WindowSizeData {
visible_viewport: rect.size,
initial_viewport: rect.size * ScaleFactor(1.0),
device_pixel_ratio: device_pixel_ratio,
})).unwrap();
compositor_proxy.send(CompositorMsg::SetLayerOrigin(
pipeline.id,
LayerId::null(),
rect.to_untyped().origin));
}
// Record the pipeline whether or not a resize was sent, so that
// duplicate frame trees do not trigger duplicate messages and the
// caller can tell that this subpage was handled.
already_sent.insert(pipeline.id);
};
}
}
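/// Replaces a child frame's pipeline with a newly created one, informing
/// the compositor if a layer has already been created for the old pipeline.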
fn update_child_pipeline(&mut self,
frame_tree: Rc<FrameTree>,
new_pipeline: Rc<Pipeline>,
old_subpage_id: SubpageId) {
let existing_tree = match frame_tree.find_with_subpage_id(Some(old_subpage_id)) {
Some(existing_tree) => existing_tree.clone(),
None => panic!("Tried to update non-existing frame tree with pipeline={:?} subpage={:?}",
new_pipeline.id,
old_subpage_id),
};
let old_pipeline = existing_tree.pipeline.borrow().clone();
*existing_tree.pipeline.borrow_mut() = new_pipeline.clone();
// If we have not yet sent this frame to the compositor for layer creation, we don't
// need to inform the compositor of updates to the pipeline.
if !existing_tree.has_compositor_layer.get() {
return;
}
let (chan, port) = channel();
self.compositor_proxy.send(CompositorMsg::ChangeLayerPipelineAndRemoveChildren(
old_pipeline.to_sendable(),
new_pipeline.to_sendable(),
chan));
let _ = port.recv();
}
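/// Adds a new child frame tree for `new_pipeline`, or updates an existing
/// one if `old_subpage_id` identifies a frame that is being navigated.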
fn create_or_update_child_pipeline(&mut self,
frame_tree: Rc<FrameTree>,
new_pipeline: Rc<Pipeline>,
new_rect: Option<TypedRect<PagePx, f32>>,
old_subpage_id: Option<SubpageId>) {
match old_subpage_id {
Some(old_subpage_id) =>
self.update_child_pipeline(frame_tree.clone(), new_pipeline, old_subpage_id),
None => {
let child_tree = Rc::new(
FrameTree::new(self.get_next_frame_id(),
new_pipeline,
Some(frame_tree.pipeline.borrow().clone())));
frame_tree.add_child(ChildFrameTree::new(child_tree, new_rect));
}
}
}
// The script task associated with pipeline_id has loaded a URL in an iframe via script. This
// will result in a new pipeline being spawned and a frame tree being added to
// containing_page_pipeline_id's frame tree's children. This message is never the result of a
// page navigation.
fn handle_script_loaded_url_in_iframe_msg(&mut self,
url: Url,
containing_page_pipeline_id: PipelineId,
new_subpage_id: SubpageId,
old_subpage_id: Option<SubpageId>,
sandbox: IFrameSandboxState) {
// Start by finding the frame trees matching the pipeline id,
// and add the new pipeline to their sub frames.
let frame_trees = self.find_all(containing_page_pipeline_id);
if frame_trees.is_empty() {
panic!("Constellation: source pipeline id of ScriptLoadedURLInIFrame is not in
navigation context, nor is it in a pending frame. This should be
impossible.");
}
// Compare the pipeline's url to the new url. If the origin is the same,
// then reuse the script task in creating the new pipeline
let source_pipeline = self.pipelines.get(&containing_page_pipeline_id).expect("Constellation:
source Id of ScriptLoadedURLInIFrameMsg does have an associated pipeline in
constellation. This should be impossible.").clone();
let source_url = source_pipeline.load_data.url.clone();
let same_script = (source_url.host() == url.host() &&
source_url.port() == url.port()) &&
sandbox == IFrameSandboxState::IFrameUnsandboxed;
// FIXME(tkuehn): Need to follow the standardized spec for checking same-origin
// Reuse the script task if the URL is same-origin
let script_pipeline = if same_script {
debug!("Constellation: loading same-origin iframe at {:?}", url);
Some(source_pipeline.clone())
} else {
debug!("Constellation: loading cross-origin iframe at {:?}", url);
None
};
let new_frame_pipeline_id = self.get_next_pipeline_id();
let pipeline = self.new_pipeline(
new_frame_pipeline_id,
Some((containing_page_pipeline_id, new_subpage_id)),
script_pipeline,
LoadData::new(url)
);
let rect = self.pending_sizes.remove(&(containing_page_pipeline_id, new_subpage_id));
for frame_tree in frame_trees.iter() {
self.create_or_update_child_pipeline(frame_tree.clone(),
pipeline.clone(),
rect,
old_subpage_id);
}
self.pipelines.insert(pipeline.id, pipeline);
}
fn handle_set_cursor_msg(&mut self, cursor: Cursor) {
self.compositor_proxy.send(CompositorMsg::SetCursor(cursor))
}
fn handle_load_url_msg(&mut self, source_id: PipelineId, load_data: LoadData) {
let url = load_data.url.to_string();
debug!("Constellation: received message to load {:?}", url);
// Make sure no pending page would be overridden.
let source_frame = self.current_frame().as_ref().unwrap().find(source_id).expect(
"Constellation: received a LoadUrl message from a pipeline_id associated
with a pipeline not in the active frame tree. This should be
impossible.");
for frame_change in self.pending_frames.iter() {
let old_id = frame_change.before.expect("Constellation: Received load msg
from pipeline, but there is no currently active page. This should
be impossible.");
let changing_frame = self.current_frame().as_ref().unwrap().find(old_id).expect("Constellation:
Pending change has non-active source pipeline. This should be
impossible.");
if changing_frame.contains(source_id) || source_frame.contains(old_id) {
// id that sent load msg is being changed already; abort
return;
}
}
// Being here means either there are no pending frames, or none of the pending
// changes would be overridden by changing the subframe associated with source_id.
let parent = source_frame.parent.clone();
let parent_id = source_frame.pipeline.borrow().parent;
let next_pipeline_id = self.get_next_pipeline_id();
let next_frame_id = self.get_next_frame_id();
let pipeline = self.new_pipeline(next_pipeline_id, parent_id, None, load_data);
self.browse(Some(source_id),
Rc::new(FrameTree::new(next_frame_id,
pipeline.clone(),
parent.borrow().clone())),
NavigationType::Load);
// Send message to ScriptTask that will suspend all timers
source_frame.pipeline.borrow().freeze();
self.pipelines.insert(pipeline.id, pipeline);
}
fn handle_navigate_msg(&mut self, direction: constellation_msg::NavigationDirection) {
debug!("received message to navigate {:?}", direction);
// TODO(tkuehn): what is the "critical point" beyond which pending frames
// should not be cleared? Currently, the behavior is that forward/back
// navigation always has navigation priority, and after that new page loading is
// first come, first served.
let destination_frame = match direction {
NavigationDirection::Forward => {
if self.navigation_context.next.is_empty() {
debug!("no next page to navigate to");
return;
} else {
let old = self.current_frame().as_ref().unwrap();
for frame in old.iter() {
frame.pipeline.borrow().revoke_paint_permission();
frame.pipeline.borrow().freeze();
}
}
self.navigation_context.forward(&mut *self.compositor_proxy)
}
NavigationDirection::Back => {
if self.navigation_context.previous.is_empty() {
debug!("no previous page to navigate to");
return;
} else {
let old = self.current_frame().as_ref().unwrap();
for frame in old.iter() {
frame.pipeline.borrow().revoke_paint_permission();
frame.pipeline.borrow().freeze();
}
}
self.navigation_context.back(&mut *self.compositor_proxy)
}
};
for frame in destination_frame.iter() {
frame.pipeline.borrow().load();
frame.pipeline.borrow().thaw();
}
self.send_frame_tree_and_grant_paint_permission(destination_frame);
}
fn pipeline_is_in_current_frame(&self, pipeline_id: PipelineId) -> bool {
self.current_frame().iter()
.any(|current_frame| current_frame.contains(pipeline_id))
}
fn handle_key_msg(&self, key: Key, state: KeyState, mods: KeyModifiers) {
match *self.current_frame() {
Some(ref frame) => {
let ScriptControlChan(ref chan) = frame.pipeline.borrow().script_chan;
chan.send(ConstellationControlMsg::SendEvent(
frame.pipeline.borrow().id,
CompositorEvent::KeyEvent(key, state, mods))).unwrap();
},
None => self.compositor_proxy.clone_compositor_proxy()
.send(CompositorMsg::KeyEvent(key, state, mods))
}
}
fn handle_get_pipeline_title_msg(&mut self, pipeline_id: PipelineId) {
match self.pipelines.get(&pipeline_id) {
None => self.compositor_proxy.send(CompositorMsg::ChangePageTitle(pipeline_id, None)),
Some(pipeline) => {
let ScriptControlChan(ref script_channel) = pipeline.script_chan;
script_channel.send(ConstellationControlMsg::GetTitle(pipeline_id)).unwrap();
}
}
}
fn handle_painter_ready_msg(&mut self, pipeline_id: PipelineId) {
debug!("Painter {:?} ready to send paint msg", pipeline_id);
// This message could originate from a pipeline in the navigation context or
// from a pending frame. The only time that we will grant paint permission is
// when the message originates from a pending frame or the current frame.
// Messages originating in the current frame are not navigations;
// they may come from a page load in a subframe.
if self.pipeline_is_in_current_frame(pipeline_id) {
self.create_compositor_layer_for_iframe_if_necessary(pipeline_id);
return;
}
// Find the pending frame change whose new pipeline id is pipeline_id.
// If it is not found, it simply means that this pipeline will not receive
// permission to paint.
let pending_index = self.pending_frames.iter().rposition(|frame_change| {
frame_change.after.pipeline.borrow().id == pipeline_id
});
match pending_index {
Some(pending_index) => {
let frame_change = self.pending_frames.swap_remove(pending_index);
let to_add = frame_change.after.clone();
// Create the next frame tree that will be given to the compositor
let next_frame_tree = if to_add.parent.borrow().is_some() {
// NOTE: work around borrowchk issues
self.current_frame().as_ref().unwrap().clone()
} else {
to_add.clone()
};
// If there are frames to revoke permission from, do so now.
match frame_change.before {
Some(revoke_id) if self.current_frame().is_some() => {
debug!("Constellation: revoking permission from {:?}", revoke_id);
let current_frame = self.current_frame().as_ref().unwrap();
let to_revoke = current_frame.find(revoke_id).expect(
"Constellation: pending frame change refers to an old \
frame not contained in the current frame. This is a bug");
for frame in to_revoke.iter() {
frame.pipeline.borrow().revoke_paint_permission();
}
// If to_add is not the root frame, then replace revoked_frame with it.
// This conveniently keeps scissor rect size intact.
// NOTE: work around borrowchk issue
let mut flag = false;
{
if to_add.parent.borrow().is_some() {
debug!("Constellation: replacing {:?} with {:?} in {:?}",
revoke_id, to_add.pipeline.borrow().id,
next_frame_tree.pipeline.borrow().id);
flag = true;
}
}
if flag {
next_frame_tree.replace_child(revoke_id, to_add);
}
}
_ => {
// Add to_add to parent's children, if it is not the root
let parent = &to_add.parent;
for parent in parent.borrow().iter() {
let subpage_id = to_add.pipeline.borrow().subpage_id()
.expect("Constellation:
Child frame's subpage id is None. This should be impossible.");
let rect = self.pending_sizes.remove(&(parent.id, subpage_id));
let parent = next_frame_tree.find(parent.id).expect(
"Constellation: pending frame has a parent frame that is not
active. This is a bug.");
parent.add_child(ChildFrameTree::new(to_add.clone(), rect));
}
}
}
self.send_frame_tree_and_grant_paint_permission(next_frame_tree.clone());
self.handle_evicted_frames_for_load_navigation(next_frame_tree,
frame_change.navigation_type);
},
None => (),
}
}
/// Called when the window is resized.
fn handle_resized_window_msg(&mut self, new_size: WindowSizeData) {
let mut already_seen = HashSet::new();
for frame_tree in self.current_frame().iter() {
debug!("constellation sending resize message to active frame");
let pipeline = &*frame_tree.pipeline.borrow();
let ScriptControlChan(ref chan) = pipeline.script_chan;
let _ = chan.send(ConstellationControlMsg::Resize(pipeline.id, new_size));
already_seen.insert(pipeline.id);
}
for frame_tree in self.navigation_context.previous.iter()
.chain(self.navigation_context.next.iter()) {
let pipeline = &*frame_tree.pipeline.borrow();
if !already_seen.contains(&pipeline.id) {
debug!("constellation sending resize message to inactive frame");
let ScriptControlChan(ref chan) = pipeline.script_chan;
let _ = chan.send(ConstellationControlMsg::ResizeInactive(pipeline.id, new_size));
already_seen.insert(pipeline.id);
}
}
// If there are any pending outermost frames, then tell them to resize. (This is how the
// initial window size gets sent to the first page loaded, giving it permission to reflow.)
for change in self.pending_frames.iter() {
let frame_tree = &change.after;
if frame_tree.parent.borrow().is_none() {
debug!("constellation sending resize message to pending outer frame ({:?})",
frame_tree.pipeline.borrow().id);
let ScriptControlChan(ref chan) = frame_tree.pipeline.borrow().script_chan;
let _ = chan.send(ConstellationControlMsg::Resize(
frame_tree.pipeline.borrow().id, new_size));
}
}
self.window_size = new_size;
}
// Close all pipelines at and beneath a given frame
fn close_pipelines(&mut self, frame_tree: Rc<FrameTree>) {
// TODO(tkuehn): should only exit once per unique script task,
// and then that script task will handle sub-exits
for frame_tree in frame_tree.iter() {
frame_tree.pipeline.borrow().exit(PipelineExitType::PipelineOnly);
self.compositor_proxy.send(CompositorMsg::PaintTaskExited(frame_tree.pipeline.borrow().id));
self.pipelines.remove(&frame_tree.pipeline.borrow().id);
}
}
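// Frames whose pipelines no longer appear in the navigation context are
// closed outright; frames still referenced are kept, but their children are
// re-checked recursively.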
fn handle_evicted_frames(&mut self, evicted_frames: Vec<Rc<FrameTree>>) {
for frame_tree in evicted_frames.into_iter() {
if !self.navigation_context.contains(frame_tree.pipeline.borrow().id) {
self.close_pipelines(frame_tree);
} else {
let frames = frame_tree.children.borrow().iter()
.map(|child| child.frame_tree.clone()).collect();
self.handle_evicted_frames(frames);
}
}
}
fn handle_evicted_frames_for_load_navigation(&mut self,
frame_tree: Rc<FrameTree>,
navigation_type: NavigationType) {
// Don't call navigation_context.load() on a Navigate type (or None, as in the case of
// parsed iframes that finish loading).
match navigation_type {
NavigationType::Load => {
debug!("Evicting frames for NavigationType::Load");
let evicted_frames = self.navigation_context.load(frame_tree,
&mut *self.compositor_proxy);
self.handle_evicted_frames(evicted_frames);
}
_ => {}
}
}
// Grants a frame tree permission to paint; optionally updates navigation to reflect a new page
fn send_frame_tree_and_grant_paint_permission(&mut self, frame_tree: Rc<FrameTree>) {
debug!("Constellation sending SetFrameTree");
let (chan, port) = channel();
self.compositor_proxy.send(CompositorMsg::SetFrameTree(frame_tree.to_sendable(),
chan,
self.chan.clone()));
if port.recv().is_err() {
debug!("Compositor has discarded SetFrameTree");
return; // Our message has been discarded, probably shutting down.
}
let iter = frame_tree.iter();
for frame in iter {
frame.has_compositor_layer.set(true);
frame.pipeline.borrow().grant_paint_permission();
}
}
fn find_child_parent_pair_in_frame_tree(&self,
frame_tree: Rc<FrameTree>,
child_pipeline_id: PipelineId)
-> Option<(ChildFrameTree, Rc<FrameTree>)> {
for child in frame_tree.children.borrow().iter() {
let child_frame_tree = child.frame_tree.clone();
if child.frame_tree.pipeline.borrow().id == child_pipeline_id {
return Some((ChildFrameTree::new(child_frame_tree, child.rect),
frame_tree.clone()));
}
let result = self.find_child_parent_pair_in_frame_tree(child_frame_tree,
child_pipeline_id);
if result.is_some() {
return result;
}
}
None
}
fn create_compositor_layer_for_iframe_if_necessary(&mut self, pipeline_id: PipelineId) {
let current_frame_tree = match self.current_frame() {
&Some(ref tree) => tree.clone(),
&None => return,
};
let pair = self.find_child_parent_pair_in_frame_tree(current_frame_tree,
pipeline_id);
let (child, parent) = match pair {
Some(pair) => pair,
None => return,
};
if child.frame_tree.has_compositor_layer.get() {
child.frame_tree.pipeline.borrow().grant_paint_permission();
return;
}
let (chan, port) = channel();
self.compositor_proxy.send(CompositorMsg::CreateRootLayerForPipeline(
parent.pipeline.borrow().to_sendable(),
child.frame_tree.pipeline.borrow().to_sendable(),
child.rect,
chan));
match port.recv() {
Ok(()) => {
child.frame_tree.has_compositor_layer.set(true);
child.frame_tree.pipeline.borrow().grant_paint_permission();
}
Err(_) => {} // The message has been discarded, we are probably shutting down.
}
}
}<|fim▁end|> | next_pipeline_id: PipelineId(0),
next_frame_id: FrameId(0),
pending_frames: vec!(),
pending_sizes: HashMap::new(), |
<|file_name|>manager.py<|end_file_name|><|fim▁begin|># Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handles database requests from other nova services."""
import copy
from nova.api.ec2 import ec2utils
from nova import block_device
from nova.cells import rpcapi as cells_rpcapi
from nova.compute import api as compute_api
from nova.compute import rpcapi as compute_rpcapi
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor.tasks import live_migrate
from nova.db import base
from nova import exception
from nova.image import glance
from nova import manager
from nova import network
from nova.network.security_group import openstack_driver
from nova import notifications
from nova.objects import base as nova_object
from nova.objects import instance as instance_obj
from nova.objects import migration as migration_obj
from nova.openstack.common import excutils
from nova.openstack.common.gettextutils import _
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common.rpc import common as rpc_common
from nova.openstack.common import timeutils
from nova import quota
from nova.scheduler import rpcapi as scheduler_rpcapi
from nova.scheduler import utils as scheduler_utils
LOG = logging.getLogger(__name__)
# Instead of having a huge list of arguments to instance_update(), we just
# accept a dict of fields to update and use this whitelist to validate it.
allowed_updates = ['task_state', 'vm_state', 'expected_task_state',
'power_state', 'access_ip_v4', 'access_ip_v6',
'launched_at', 'terminated_at', 'host', 'node',
'memory_mb', 'vcpus', 'root_gb', 'ephemeral_gb',
'instance_type_id', 'root_device_name', 'launched_on',
'progress', 'vm_mode', 'default_ephemeral_device',
'default_swap_device',
'system_metadata', 'updated_at'
]
# Fields that we want to convert back into a datetime object.
datetime_fields = ['launched_at', 'terminated_at', 'updated_at']
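# A typical (hypothetical) call from a compute node might look like:
#   instance_update(ctxt, instance_uuid,
#                   {'task_state': 'rebooting',
#                    'launched_at': '2013-06-01T00:00:00.000000'})
# where string timestamps are parsed back into datetimes below.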
class ConductorManager(manager.Manager):
"""Mission: Conduct things.
The methods in the base API for nova-conductor are various proxy operations
performed on behalf of the nova-compute service running on compute nodes.
Compute nodes are not allowed to directly access the database, so this set
of methods allows them to get specific work done without locally accessing
the database.
The nova-conductor service also exposes an API in the 'compute_task'
namespace. See the ComputeTaskManager class for details.
"""
RPC_API_VERSION = '1.58'
def __init__(self, *args, **kwargs):
super(ConductorManager, self).__init__(service_name='conductor',
*args, **kwargs)
self.security_group_api = (
openstack_driver.get_openstack_security_group_driver())
self._network_api = None
self._compute_api = None
self.compute_task_mgr = ComputeTaskManager()
self.quotas = quota.QUOTAS
self.cells_rpcapi = cells_rpcapi.CellsAPI()
def create_rpc_dispatcher(self, *args, **kwargs):
kwargs['additional_apis'] = [self.compute_task_mgr]
return super(ConductorManager, self).create_rpc_dispatcher(*args,
**kwargs)
@property
def network_api(self):
# NOTE(danms): We need to instantiate our network_api on first use
# to avoid the circular dependency that exists between our init
# and network_api's
if self._network_api is None:
self._network_api = network.API()
return self._network_api
@property
def compute_api(self):
if self._compute_api is None:
self._compute_api = compute_api.API()
return self._compute_api
def ping(self, context, arg):
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# now a part of the base rpc API.
return jsonutils.to_primitive({'service': 'conductor', 'arg': arg})
@rpc_common.client_exceptions(KeyError, ValueError,
exception.InvalidUUID,
exception.InstanceNotFound,
exception.UnexpectedTaskStateError)
def instance_update(self, context, instance_uuid,
updates, service=None):
for key, value in updates.iteritems():
if key not in allowed_updates:
LOG.error(_("Instance update attempted for "
"'%(key)s' on %(instance_uuid)s"),
{'key': key, 'instance_uuid': instance_uuid})
raise KeyError("unexpected update keyword '%s'" % key)
if key in datetime_fields and isinstance(value, basestring):
updates[key] = timeutils.parse_strtime(value)
old_ref, instance_ref = self.db.instance_update_and_get_original(
context, instance_uuid, updates)
notifications.send_update(context, old_ref, instance_ref, service)
return jsonutils.to_primitive(instance_ref)
@rpc_common.client_exceptions(exception.InstanceNotFound)
def instance_get(self, context, instance_id):
return jsonutils.to_primitive(
self.db.instance_get(context, instance_id))
@rpc_common.client_exceptions(exception.InstanceNotFound)
def instance_get_by_uuid(self, context, instance_uuid,
columns_to_join=None):
return jsonutils.to_primitive(
self.db.instance_get_by_uuid(context, instance_uuid,
columns_to_join))
# NOTE(hanlind): This method can be removed in v2.0 of the RPC API.
def instance_get_all(self, context):
return jsonutils.to_primitive(self.db.instance_get_all(context))
def instance_get_all_by_host(self, context, host, node=None,
columns_to_join=None):
if node is not None:
result = self.db.instance_get_all_by_host_and_node(
context.elevated(), host, node)
else:
result = self.db.instance_get_all_by_host(context.elevated(), host,
columns_to_join)
return jsonutils.to_primitive(result)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
@rpc_common.client_exceptions(exception.MigrationNotFound)
def migration_get(self, context, migration_id):
migration_ref = self.db.migration_get(context.elevated(),
migration_id)
return jsonutils.to_primitive(migration_ref)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def migration_get_unconfirmed_by_dest_compute(self, context,
confirm_window,
dest_compute):
migrations = self.db.migration_get_unconfirmed_by_dest_compute(
context, confirm_window, dest_compute)
return jsonutils.to_primitive(migrations)
def migration_get_in_progress_by_host_and_node(self, context,
host, node):
migrations = self.db.migration_get_in_progress_by_host_and_node(
context, host, node)
return jsonutils.to_primitive(migrations)
# NOTE(comstud): This method can be removed in v2.0 of the RPC API.
def migration_create(self, context, instance, values):
values.update({'instance_uuid': instance['uuid'],
'source_compute': instance['host'],
'source_node': instance['node']})
migration_ref = self.db.migration_create(context.elevated(), values)
return jsonutils.to_primitive(migration_ref)
@rpc_common.client_exceptions(exception.MigrationNotFound)
def migration_update(self, context, migration, status):
migration_ref = self.db.migration_update(context.elevated(),
migration['id'],
{'status': status})
return jsonutils.to_primitive(migration_ref)
@rpc_common.client_exceptions(exception.AggregateHostExists)
def aggregate_host_add(self, context, aggregate, host):
host_ref = self.db.aggregate_host_add(context.elevated(),
aggregate['id'], host)
return jsonutils.to_primitive(host_ref)
@rpc_common.client_exceptions(exception.AggregateHostNotFound)
def aggregate_host_delete(self, context, aggregate, host):
self.db.aggregate_host_delete(context.elevated(),
aggregate['id'], host)
@rpc_common.client_exceptions(exception.AggregateNotFound)
def aggregate_get(self, context, aggregate_id):
aggregate = self.db.aggregate_get(context.elevated(), aggregate_id)
return jsonutils.to_primitive(aggregate)
def aggregate_get_by_host(self, context, host, key=None):
aggregates = self.db.aggregate_get_by_host(context.elevated(),
host, key)
return jsonutils.to_primitive(aggregates)
def aggregate_metadata_add(self, context, aggregate, metadata,
set_delete=False):
new_metadata = self.db.aggregate_metadata_add(context.elevated(),
aggregate['id'],
metadata, set_delete)
return jsonutils.to_primitive(new_metadata)
@rpc_common.client_exceptions(exception.AggregateMetadataNotFound)
def aggregate_metadata_delete(self, context, aggregate, key):
self.db.aggregate_metadata_delete(context.elevated(),
aggregate['id'], key)
def aggregate_metadata_get_by_host(self, context, host,
key='availability_zone'):
result = self.db.aggregate_metadata_get_by_host(context, host, key)
return jsonutils.to_primitive(result)
def bw_usage_update(self, context, uuid, mac, start_period,
bw_in=None, bw_out=None,
last_ctr_in=None, last_ctr_out=None,
last_refreshed=None,
update_cells=True):
if [bw_in, bw_out, last_ctr_in, last_ctr_out].count(None) != 4:
self.db.bw_usage_update(context, uuid, mac, start_period,
bw_in, bw_out, last_ctr_in, last_ctr_out,
last_refreshed,
update_cells=update_cells)
usage = self.db.bw_usage_get(context, uuid, start_period, mac)
return jsonutils.to_primitive(usage)
# NOTE(russellb) This method can be removed in 2.0 of this API. It is
# deprecated in favor of the method in the base API.
def get_backdoor_port(self, context):
return self.backdoor_port
def security_group_get_by_instance(self, context, instance):
group = self.db.security_group_get_by_instance(context,
instance['uuid'])
return jsonutils.to_primitive(group)
def security_group_rule_get_by_security_group(self, context, secgroup):
rules = self.db.security_group_rule_get_by_security_group(
context, secgroup['id'])
return jsonutils.to_primitive(rules, max_depth=4)
def provider_fw_rule_get_all(self, context):
rules = self.db.provider_fw_rule_get_all(context)
return jsonutils.to_primitive(rules)
def agent_build_get_by_triple(self, context, hypervisor, os, architecture):
info = self.db.agent_build_get_by_triple(context, hypervisor, os,
architecture)
return jsonutils.to_primitive(info)
def block_device_mapping_update_or_create(self, context, values,
create=None):
if create is None:
bdm = self.db.block_device_mapping_update_or_create(context,
values)
elif create is True:
bdm = self.db.block_device_mapping_create(context, values)
else:
bdm = self.db.block_device_mapping_update(context,
values['id'],
values)
# NOTE(comstud): 'bdm' is always in the new format, so we
# account for this in cells/messaging.py
self.cells_rpcapi.bdm_update_or_create_at_top(context, bdm,
create=create)
def block_device_mapping_get_all_by_instance(self, context, instance,
legacy=True):
bdms = self.db.block_device_mapping_get_all_by_instance(
context, instance['uuid'])
if legacy:
bdms = block_device.legacy_mapping(bdms)
return jsonutils.to_primitive(bdms)
def block_device_mapping_destroy(self, context, bdms=None,
instance=None, volume_id=None,
device_name=None):
if bdms is not None:
for bdm in bdms:
self.db.block_device_mapping_destroy(context, bdm['id'])
# NOTE(comstud): bdm['id'] will be different in API cell,
# so we must try to destroy by device_name or volume_id.
# We need an instance_uuid in order to do this properly,
# too.
# I hope to clean a lot of this up in the object
# implementation.
instance_uuid = (bdm['instance_uuid'] or
(instance and instance['uuid']))
if not instance_uuid:
continue
# Better to be safe than sorry. device_name is not
# NULLable, however it could be an empty string.
if bdm['device_name']:
self.cells_rpcapi.bdm_destroy_at_top(
context, instance_uuid,
device_name=bdm['device_name'])
elif bdm['volume_id']:
self.cells_rpcapi.bdm_destroy_at_top(
context, instance_uuid,
volume_id=bdm['volume_id'])
elif instance is not None and volume_id is not None:
self.db.block_device_mapping_destroy_by_instance_and_volume(
context, instance['uuid'], volume_id)
self.cells_rpcapi.bdm_destroy_at_top(
context, instance['uuid'], volume_id=volume_id)
elif instance is not None and device_name is not None:
self.db.block_device_mapping_destroy_by_instance_and_device(
context, instance['uuid'], device_name)
self.cells_rpcapi.bdm_destroy_at_top(
context, instance['uuid'], device_name=device_name)
else:
# NOTE(danms): This shouldn't happen
raise exception.Invalid(_("Invalid block_device_mapping_destroy"
" invocation"))
def instance_get_all_by_filters(self, context, filters, sort_key,
sort_dir, columns_to_join=None):
result = self.db.instance_get_all_by_filters(
context, filters, sort_key, sort_dir,
columns_to_join=columns_to_join)
return jsonutils.to_primitive(result)
# NOTE(hanlind): This method can be removed in v2.0 of the RPC API.
def instance_get_all_hung_in_rebooting(self, context, timeout):
result = self.db.instance_get_all_hung_in_rebooting(context, timeout)
return jsonutils.to_primitive(result)
def instance_get_active_by_window(self, context, begin, end=None,
project_id=None, host=None):
# Unused, but cannot remove until major RPC version bump
result = self.db.instance_get_active_by_window(context, begin, end,
project_id, host)
return jsonutils.to_primitive(result)
def instance_get_active_by_window_joined(self, context, begin, end=None,
project_id=None, host=None):
result = self.db.instance_get_active_by_window_joined(
context, begin, end, project_id, host)
return jsonutils.to_primitive(result)
def instance_destroy(self, context, instance):
self.db.instance_destroy(context, instance['uuid'])
def instance_info_cache_delete(self, context, instance):
self.db.instance_info_cache_delete(context, instance['uuid'])
def instance_info_cache_update(self, context, instance, values):
self.db.instance_info_cache_update(context, instance['uuid'],
values)
def instance_type_get(self, context, instance_type_id):
result = self.db.flavor_get(context, instance_type_id)
return jsonutils.to_primitive(result)
def instance_fault_create(self, context, values):
result = self.db.instance_fault_create(context, values)
return jsonutils.to_primitive(result)
# NOTE(kerrin): This method can be removed in v2.0 of the RPC API.
def vol_get_usage_by_time(self, context, start_time):
result = self.db.vol_get_usage_by_time(context, start_time)
return jsonutils.to_primitive(result)
# NOTE(kerrin): The last_refreshed argument is unused by this method
# and can be removed in v2.0 of the RPC API.
def vol_usage_update(self, context, vol_id, rd_req, rd_bytes, wr_req,
wr_bytes, instance, last_refreshed=None,
update_totals=False):
vol_usage = self.db.vol_usage_update(context, vol_id,
rd_req, rd_bytes,
wr_req, wr_bytes,
instance['uuid'],
instance['project_id'],
instance['user_id'],
instance['availability_zone'],
update_totals)
# We have just updated the database, so send the notification now
self.notifier.info(context, 'volume.usage',
compute_utils.usage_volume_info(vol_usage))
@rpc_common.client_exceptions(exception.ComputeHostNotFound,
exception.HostBinaryNotFound)
def service_get_all_by(self, context, topic=None, host=None, binary=None):
if not any((topic, host, binary)):
result = self.db.service_get_all(context)
elif all((topic, host)):
if topic == 'compute':
result = self.db.service_get_by_compute_host(context, host)
# FIXME(comstud) Potentially remove this on bump to v2.0
result = [result]
else:
result = self.db.service_get_by_host_and_topic(context,
host, topic)
elif all((host, binary)):
result = self.db.service_get_by_args(context, host, binary)
elif topic:
result = self.db.service_get_all_by_topic(context, topic)
elif host:
result = self.db.service_get_all_by_host(context, host)
return jsonutils.to_primitive(result)
def action_event_start(self, context, values):
evt = self.db.action_event_start(context, values)
return jsonutils.to_primitive(evt)
def action_event_finish(self, context, values):
evt = self.db.action_event_finish(context, values)
return jsonutils.to_primitive(evt)
def service_create(self, context, values):
svc = self.db.service_create(context, values)
return jsonutils.to_primitive(svc)
@rpc_common.client_exceptions(exception.ServiceNotFound)
def service_destroy(self, context, service_id):
self.db.service_destroy(context, service_id)
def compute_node_create(self, context, values):
result = self.db.compute_node_create(context, values)
return jsonutils.to_primitive(result)
def compute_node_update(self, context, node, values, prune_stats=False):
result = self.db.compute_node_update(context, node['id'], values,
prune_stats)
return jsonutils.to_primitive(result)
def compute_node_delete(self, context, node):
result = self.db.compute_node_delete(context, node['id'])
return jsonutils.to_primitive(result)
@rpc_common.client_exceptions(exception.ServiceNotFound)
def service_update(self, context, service, values):
svc = self.db.service_update(context, service['id'], values)
return jsonutils.to_primitive(svc)
def task_log_get(self, context, task_name, begin, end, host, state=None):
result = self.db.task_log_get(context, task_name, begin, end, host,
state)
return jsonutils.to_primitive(result)
def task_log_begin_task(self, context, task_name, begin, end, host,
task_items=None, message=None):
result = self.db.task_log_begin_task(context.elevated(), task_name,
begin, end, host, task_items,
message)
return jsonutils.to_primitive(result)
def task_log_end_task(self, context, task_name, begin, end, host,
errors, message=None):
result = self.db.task_log_end_task(context.elevated(), task_name,
begin, end, host, errors, message)
return jsonutils.to_primitive(result)
def notify_usage_exists(self, context, instance, current_period=False,
ignore_missing_network_data=True,
system_metadata=None, extra_usage_info=None):
compute_utils.notify_usage_exists(self.notifier, context, instance,
current_period,
ignore_missing_network_data,
system_metadata, extra_usage_info)
def security_groups_trigger_handler(self, context, event, args):
self.security_group_api.trigger_handler(event, context, *args)
def security_groups_trigger_members_refresh(self, context, group_ids):
self.security_group_api.trigger_members_refresh(context, group_ids)
def network_migrate_instance_start(self, context, instance, migration):
self.network_api.migrate_instance_start(context, instance, migration)
def network_migrate_instance_finish(self, context, instance, migration):
self.network_api.migrate_instance_finish(context, instance, migration)
def quota_commit(self, context, reservations, project_id=None,
user_id=None):
quota.QUOTAS.commit(context, reservations, project_id=project_id,
user_id=user_id)
def quota_rollback(self, context, reservations, project_id=None,
user_id=None):
quota.QUOTAS.rollback(context, reservations, project_id=project_id,
user_id=user_id)
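# Returns EC2-style identifiers for an instance, e.g. (hypothetical):
#   {'instance-id': 'i-0000001e', 'ami-id': 'ami-00000002'}
# plus 'kernel-id'/'ramdisk-id' entries when those images are set.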
def get_ec2_ids(self, context, instance):
ec2_ids = {}
ec2_ids['instance-id'] = ec2utils.id_to_ec2_inst_id(instance['uuid'])
ec2_ids['ami-id'] = ec2utils.glance_id_to_ec2_id(context,
instance['image_ref'])
for image_type in ['kernel', 'ramdisk']:
if '%s_id' % image_type in instance:
image_id = instance['%s_id' % image_type]
ec2_image_type = ec2utils.image_type(image_type)
ec2_id = ec2utils.glance_id_to_ec2_id(context, image_id,
ec2_image_type)
ec2_ids['%s-id' % image_type] = ec2_id
return ec2_ids
# NOTE(danms): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def compute_stop(self, context, instance, do_cast=True):
# NOTE(mriedem): Clients using an interface before 1.43 will be sending
# dicts so we need to handle that here since compute/api::stop()
# requires an object.
if isinstance(instance, dict):
instance = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), instance)
self.compute_api.stop(context, instance, do_cast)
# NOTE(comstud): This method is now deprecated and can be removed in
# version v2.0 of the RPC API
def compute_confirm_resize(self, context, instance, migration_ref):
if isinstance(instance, dict):
attrs = ['metadata', 'system_metadata', 'info_cache',
'security_groups']
instance = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), instance,
expected_attrs=attrs)
if isinstance(migration_ref, dict):
migration_ref = migration_obj.Migration._from_db_object(
context.elevated(), migration_ref)
self.compute_api.confirm_resize(context, instance,
migration=migration_ref)
def compute_unrescue(self, context, instance):
self.compute_api.unrescue(context, instance)
def _object_dispatch(self, target, method, context, args, kwargs):
"""Dispatch a call to an object method.
This ensures that object methods get called and any exception
that is raised gets wrapped in a ClientException for forwarding
back to the caller (without spamming the conductor logs).
"""
try:
# NOTE(danms): Keep the getattr inside the try block since
# a missing method is really a client problem
return getattr(target, method)(context, *args, **kwargs)
except Exception:
raise rpc_common.ClientException()
def object_class_action(self, context, objname, objmethod,
objver, args, kwargs):
"""Perform a classmethod action on an object."""
objclass = nova_object.NovaObject.obj_class_from_name(objname,
objver)
return self._object_dispatch(objclass, objmethod, context,
args, kwargs)
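# Runs a method on a remote object and hands back (changes, result) so the
# caller can replay the changes onto its local copy; e.g. (hypothetical) a
# remote call that set instance.host would yield
# ({'host': 'compute1', 'obj_what_changed': set(['host'])}, None).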
def object_action(self, context, objinst, objmethod, args, kwargs):
"""Perform an action on an object."""
oldobj = copy.copy(objinst)
result = self._object_dispatch(objinst, objmethod, context,
args, kwargs)
updates = dict()
# NOTE(danms): Diff the object with the one passed to us and
# generate a list of changes to forward back
for field in objinst.fields:
if not objinst.obj_attr_is_set(field):
# Avoid demand-loading anything
continue
if (not oldobj.obj_attr_is_set(field) or
oldobj[field] != objinst[field]):
updates[field] = objinst._attr_to_primitive(field)
# This is safe since a field named this would conflict with the
# method anyway
updates['obj_what_changed'] = objinst.obj_what_changed()
return updates, result
# NOTE(danms): This method is now deprecated and can be removed in
# v2.0 of the RPC API
def compute_reboot(self, context, instance, reboot_type):
self.compute_api.reboot(context, instance, reboot_type)
class ComputeTaskManager(base.Base):
"""Namespace for compute methods.
This class presents an rpc API for nova-conductor under the 'compute_task'
namespace. The methods here are compute operations that are invoked
by the API service. These methods see the operation to completion, which
may involve coordinating activities on multiple compute nodes.
"""
RPC_API_NAMESPACE = 'compute_task'
RPC_API_VERSION = '1.6'
def __init__(self):
super(ComputeTaskManager, self).__init__()
self.compute_rpcapi = compute_rpcapi.ComputeAPI()
self.scheduler_rpcapi = scheduler_rpcapi.SchedulerAPI()
self.image_service = glance.get_default_image_service()
self.quotas = quota.QUOTAS
@rpc_common.client_exceptions(exception.NoValidHost,
exception.ComputeServiceUnavailable,
exception.InvalidHypervisorType,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.MigrationPreCheckError)
def migrate_server(self, context, instance, scheduler_hint, live, rebuild,
flavor, block_migration, disk_over_commit, reservations=None):
if instance and not isinstance(instance, instance_obj.Instance):
# NOTE(danms): Until v2 of the RPC API, we need to tolerate
# old-world instance objects here
attrs = ['metadata', 'system_metadata', 'info_cache',
'security_groups']
instance = instance_obj.Instance._from_db_object(
context, instance_obj.Instance(), instance,
expected_attrs=attrs)
if live and not rebuild and not flavor:
self._live_migrate(context, instance, scheduler_hint,
block_migration, disk_over_commit)
elif not live and not rebuild and flavor:
instance_uuid = instance['uuid']
with compute_utils.EventReporter(context, ConductorManager(),
'cold_migrate', instance_uuid):
self._cold_migrate(context, instance, flavor,
scheduler_hint['filter_properties'],
reservations)
else:
raise NotImplementedError()
def _cold_migrate(self, context, instance, flavor, filter_properties,
reservations):
image_ref = instance.image_ref
image = compute_utils.get_image_metadata(
context, self.image_service, image_ref, instance)
request_spec = scheduler_utils.build_request_spec(
context, image, [instance], instance_type=flavor)
try:
hosts = self.scheduler_rpcapi.select_destinations(
context, request_spec, filter_properties)
host_state = hosts[0]
except exception.NoValidHost as ex:
vm_state = instance['vm_state']
if not vm_state:
vm_state = vm_states.ACTIVE
updates = {'vm_state': vm_state, 'task_state': None}
self._set_vm_state_and_notify(context, 'migrate_server',
updates, ex, request_spec)
if reservations:
self.quotas.rollback(context, reservations)
LOG.warning(_("No valid host found for cold migrate"))
return
try:
scheduler_utils.populate_filter_properties(filter_properties,
host_state)
# context is not serializable
filter_properties.pop('context', None)
# TODO(timello): originally, instance_type in request_spec
# on compute.api.resize does not have 'extra_specs', so we
# remove it for now to keep tests backward compatibility.
request_spec['instance_type'].pop('extra_specs')
(host, node) = (host_state['host'], host_state['nodename'])
self.compute_rpcapi.prep_resize(
context, image, instance,
flavor, host,
reservations, request_spec=request_spec,
filter_properties=filter_properties, node=node)
except Exception as ex:
with excutils.save_and_reraise_exception():
updates = {'vm_state': vm_states.ERROR,
'task_state': None}
self._set_vm_state_and_notify(context, 'migrate_server',
updates, ex, request_spec)
if reservations:
self.quotas.rollback(context, reservations)
def _set_vm_state_and_notify(self, context, method, updates, ex,
request_spec):
scheduler_utils.set_vm_state_and_notify(
context, 'compute_task', method, updates,
ex, request_spec, self.db)
def _live_migrate(self, context, instance, scheduler_hint,
block_migration, disk_over_commit):
destination = scheduler_hint.get("host")
try:
live_migrate.execute(context, instance, destination,
block_migration, disk_over_commit)
except (exception.NoValidHost,
exception.ComputeServiceUnavailable,
exception.InvalidHypervisorType,
exception.UnableToMigrateToSelf,
exception.DestinationHypervisorTooOld,
exception.InvalidLocalStorage,
exception.InvalidSharedStorage,
exception.MigrationPreCheckError) as ex:
with excutils.save_and_reraise_exception():
#TODO(johngarbutt) - eventually need instance actions here
request_spec = {'instance_properties': {
'uuid': instance['uuid'], },
}
scheduler_utils.set_vm_state_and_notify(context,
'compute_task', 'migrate_server',
dict(vm_state=instance['vm_state'],
task_state=None,
expected_task_state=task_states.MIGRATING,),
ex, request_spec, self.db)
except Exception as ex:
with excutils.save_and_reraise_exception():
request_spec = {'instance_properties': {
'uuid': instance['uuid'], },
}
scheduler_utils.set_vm_state_and_notify(context,
'compute_task', 'migrate_server',
{'vm_state': vm_states.ERROR},
ex, request_spec, self.db)
def build_instances(self, context, instances, image, filter_properties,
admin_password, injected_files, requested_networks,<|fim▁hole|> # NOTE(alaski): For compatibility until a new scheduler method is used.
request_spec.update({'block_device_mapping': block_device_mapping,
'security_group': security_groups})
self.scheduler_rpcapi.run_instance(context, request_spec=request_spec,
admin_password=admin_password, injected_files=injected_files,
requested_networks=requested_networks, is_first_time=True,
filter_properties=filter_properties,
legacy_bdm_in_spec=legacy_bdm)
def _get_image(self, context, image_id):
if not image_id:
return None
return self.image_service.show(context, image_id)
def _delete_image(self, context, image_id):
(image_service, image_id) = glance.get_remote_image_service(context,
image_id)
return image_service.delete(context, image_id)
def _schedule_instances(self, context, image, filter_properties,
*instances):
request_spec = scheduler_utils.build_request_spec(context, image,
instances)
# dict(host='', nodename='', limits='')
hosts = self.scheduler_rpcapi.select_destinations(context,
request_spec, filter_properties)
return hosts
def unshelve_instance(self, context, instance):
sys_meta = instance.system_metadata
if instance.vm_state == vm_states.SHELVED:
instance.task_state = task_states.POWERING_ON
instance.save(expected_task_state=task_states.UNSHELVING)
self.compute_rpcapi.start_instance(context, instance)
snapshot_id = sys_meta.get('shelved_image_id')
if snapshot_id:
self._delete_image(context, snapshot_id)
elif instance.vm_state == vm_states.SHELVED_OFFLOADED:
try:
with compute_utils.EventReporter(context, self.db,
'get_image_info', instance.uuid):
image = self._get_image(context,
sys_meta['shelved_image_id'])
except exception.ImageNotFound:
with excutils.save_and_reraise_exception():
LOG.error(_('Unshelve attempted but the image %s '
'cannot be found'), sys_meta['shelved_image_id'],
instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
hosts = self._schedule_instances(context, image, [], instance)
host = hosts.pop(0)['host']
self.compute_rpcapi.unshelve_instance(context, instance, host,
image)
else:
LOG.error(_('Unshelve attempted but vm_state not SHELVED or '
'SHELVED_OFFLOADED'), instance=instance)
instance.vm_state = vm_states.ERROR
instance.save()
return
for key in ['shelved_at', 'shelved_image_id', 'shelved_host']:
if key in sys_meta:
del(sys_meta[key])
instance.system_metadata = sys_meta
instance.save()<|fim▁end|> | security_groups, block_device_mapping, legacy_bdm=True):
request_spec = scheduler_utils.build_request_spec(context, image,
instances) |
<|file_name|>threeJS.js<|end_file_name|><|fim▁begin|>///@INFO: UNCOMMON
// This Component shows the possibility of using another Render Engine within WebGLStudio.
// The idea here is to create a component that calls the other render engine renderer during my rendering method
function ThreeJS( o )
{
this.enabled = true;
this.autoclear = true; //clears the scene on start
this._code = ThreeJS.default_code;
if(global.gl)
{
if( typeof(THREE) == "undefined")
this.loadLibrary( function() { this.setupContext(); } );
else
this.setupContext();
}
this._script = new LScript();
//maybe add function to retrieve texture
this._script.catch_exceptions = false;
if(o)
this.configure(o);
}
ThreeJS.prototype.setupContext = function()
{
if(this._engine)
return;
if( typeof(THREE) == "undefined")
{
console.error("ThreeJS library not loaded");
return;
}
if( !THREE.Scene )
{
console.error("ThreeJS error parsing library");
return; //this could happen if there was an error parsing THREE.JS
}
//GLOBAL VARS
this._engine = {
component: this,
node: this._root,
scene: new THREE.Scene(),
camera: new THREE.PerspectiveCamera( 70, gl.canvas.width / gl.canvas.height, 1, 1000 ),
renderer: new THREE.WebGLRenderer( { canvas: gl.canvas, context: gl } ),
root: new THREE.Object3D(),
ThreeJS: this.constructor
};
this._engine.scene.add( this._engine.root );
}
ThreeJS.default_code = "//renderer, camera, scene, already created, they are globals.\n//use root as your base Object3D node if you want to use the scene manipulators.\n\nthis.start = function() {\n}\n\nthis.render = function(){\n}\n\nthis.update = function(dt){\n}\n";
ThreeJS.library_url = "http://threejs.org/build/three.js";
Object.defineProperty( ThreeJS.prototype, "code", {
set: function(v)
{
this._code = v;
this.processCode();
},
get: function() { return this._code; },
enumerable: true
});
ThreeJS["@code"] = { widget: "code", allow_inline: false };
ThreeJS.prototype.onAddedToScene = function( scene )
{
LEvent.bind( ONE.Renderer, "renderInstances", this.onEvent, this );
LEvent.bind( scene, "start", this.onEvent, this );
LEvent.bind( scene, "update", this.onEvent, this );
LEvent.bind( scene, "finish", this.onEvent, this );
this.processCode();
}
ThreeJS.prototype.clearScene = function()
{
if(!this._engine)
return;
//remove inside root
var root = this._engine.root;
for( var i = root.children.length - 1; i >= 0; i--)
root.remove( root.children[i] );
//remove inside scene but not root
root = this._engine.scene;
for( var i = root.children.length - 1; i >= 0; i--)
if( root.children[i] != this._engine.root )
root.remove( root.children[i] );
}
<|fim▁hole|>{
LEvent.unbind( ONE.Renderer, "renderInstances", this.onEvent, this );
LEvent.unbindAll( scene, this );
//clear scene
if(this.autoclear)
this.clearScene();
}
ThreeJS.prototype.onEvent = function( e, param )
{
if( !this.enabled || !this._engine )
return;
var engine = this._engine;
if(e == "start")
{
//clear scene?
if(this.autoclear)
this.clearScene();
if(this._script)
this._script.callMethod( "start" );
}
else if(e == "renderInstances")
{
//copy camera info so both cameras matches
var current_camera = ONE.Renderer._current_camera;
engine.camera.fov = current_camera.fov;
engine.camera.aspect = current_camera._final_aspect;
engine.camera.near = current_camera.near;
engine.camera.far = current_camera.far;
engine.camera.updateProjectionMatrix()
engine.camera.position.fromArray( current_camera._global_eye );
engine.camera.lookAt( new THREE.Vector3( current_camera._global_center[0], current_camera._global_center[1], current_camera._global_center[2] ) );
//copy the root info
ThreeJS.copyTransform( this._root, engine.root );
//render using ThreeJS
engine.renderer.setSize( gl.viewport_data[2], gl.viewport_data[3] );
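//reset the renderer's cached GL state (the method name changed across three.js versions)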
if( engine.renderer.resetGLState )
engine.renderer.resetGLState();
else if( engine.renderer.state.reset )
engine.renderer.state.reset();
if(this._script)
this._script.callMethod( "render" );
else
engine.renderer.render( engine.scene, engine.camera ); //render the scene
//reset GL here?
//read the root position and update the node?
}
else if(e == "update")
{
if(this._script)
this._script.callMethod( "update", param );
else
engine.scene.update( param );
}
else if(e == "finish")
{
if(this._script)
this._script.callMethod( "finish" );
}
}
/*
ThreeJS.prototype.getCode = function()
{
return this.code;
}
ThreeJS.prototype.setCode = function( code, skip_events )
{
this.code = code;
this.processCode( skip_events );
}
*/
ThreeJS.copyTransform = function( a, b )
{
//litescene to threejs
if( a.constructor === ONE.SceneNode )
{
var global_position = vec3.create();
if(a.transform)
a.transform.getGlobalPosition( global_position );
b.position.set( global_position[0], global_position[1], global_position[2] );
//rotation
var global_rotation = quat.create();
if(a.transform)
a.transform.getGlobalRotation( global_rotation );
b.quaternion.set( global_rotation[0], global_rotation[1], global_rotation[2], global_rotation[3] );
//scale
var global_scale = vec3.fromValues(1,1,1);
if(a.transform)
a.transform.getGlobalScale( global_scale );
b.scale.set( global_scale[0], global_scale[1], global_scale[2] );
}
else if( a.constructor === ONE.Transform )
{
var global_position = vec3.create();
a.getGlobalPosition( global_position );
b.position.set( global_position[0], global_position[1], global_position[2] );
//rotation
var global_rotation = quat.create();
a.getGlobalRotation( global_rotation );
b.quaternion.set( global_rotation[0], global_rotation[1], global_rotation[2], global_rotation[3] );
//scale
var global_scale = vec3.fromValues(1,1,1);
a.getGlobalScale( global_scale );
b.scale.set( global_scale[0], global_scale[1], global_scale[2] );
}
else //threejs to litescene
{
if( b.constructor == ONE.Transform )
b.fromMatrix( a.matrixWorld );
else if( b.constructor == ONE.SceneNode && b.transform )
b.transform.fromMatrix( a.matrixWorld );
}
}
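//Usage sketch (assuming a LiteScene node "my_node" and the engine created above):
// ThreeJS.copyTransform( my_node, engine.root ); //LiteScene -> THREE
// ThreeJS.copyTransform( engine.root, my_node ); //THREE -> LiteScene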
ThreeJS.prototype.loadLibrary = function( on_complete )
{
if( typeof(THREE) !== "undefined" )
{
if(on_complete)
on_complete.call(this);
return;
}
if( this._loading )
{
LEvent.bind( this, "threejs_loaded", on_complete, this );
return;
}
if(this._loaded)
{
if(on_complete)
on_complete.call(this);
return;
}
this._loading = true;
var that = this;
ONE.Network.requestScript( ThreeJS.library_url, function(){
console.log("ThreeJS library loaded");
that._loading = false;
that._loaded = true;
LEvent.trigger( that, "threejs_loaded" );
LEvent.unbindAllEvent( that, "threejs_loaded" );
if(!that._engine)
that.setupContext();
});
}
ThreeJS.prototype.processCode = function( skip_events )
{
if(!this._script || !this._root || !this._root.scene )
return;
this._script.code = this.code;
//force threejs inclusion
if( typeof(THREE) == "undefined")
{
this.loadLibrary( function() {
this.processCode();
});
return;
}
if(!this._engine)
this.setupContext();
if(this._root && !ONE.Script.block_execution )
{
//compiles and executes the context
return this._script.compile( this._engine, true );
}
return true;
}
ONE.registerComponent( ThreeJS );<|fim▁end|> | ThreeJS.prototype.onRemovedFromScene = function( scene )
|
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>trait Shape {
fn area(&self) -> i32;
fn is_shape(&self) -> bool {
true
}
}
struct Square {
side_length: i32,
}
impl Shape for Square {
fn area(&self) -> i32 {
self.side_length * self.side_length
}
}
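// Square only provides area(); is_shape() comes from the trait's default impl.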
fn main() {
let square = Square { side_length: 2 };<|fim▁hole|> println!("The square's area is: {}", square.area());
}
#[cfg(test)]
mod tests {
use super::{Shape, Square};
#[test]
fn area() {
let square = Square { side_length: 2 };
assert_eq!(square.area(), 4);
}
#[test]
fn is_shape() {
let square = Square { side_length: 2 };
assert!(square.is_shape())
}
}<|fim▁end|> | |
<|file_name|>EntityStatePeSubsystemType.py<|end_file_name|><|fim▁begin|># Copyright 2016 Casey Jaymes<|fim▁hole|># PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
import logging
from scap.model.oval_5 import PE_SUBSYSTEM_ENUMERATION
from scap.model.oval_5.defs.EntityStateType import EntityStateType
logger = logging.getLogger(__name__)
class EntityStatePeSubsystemType(EntityStateType):
MODEL_MAP = {
}
def get_value_enum(self):
return PE_SUBSYSTEM_ENUMERATION<|fim▁end|> |
# This file is part of PySCAP.
# |
<|file_name|>apps.py<|end_file_name|><|fim▁begin|>from biohub.core.plugins import PluginConfig
<|fim▁hole|> name = 'tests.core.plugins.test'
title = ''
author = ''
description = ''<|fim▁end|> |
class TestConfig(PluginConfig):
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect, HttpResponse
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.views.decorators.csrf import csrf_exempt
from subscriber.models import Consumer, ConsumerType, Recharge, TotalRecharge, ACL
from product.models import Product
from voice_records.models import VoiceRecord, VoiceReg
from sms.models import SMSPayment
# from local_lib.v3 import is_number, is_float
from local_lib.v3 import is_number, is_float, is_bangladeshi_number, is_japanese_number, send_sms
from transaction.models import Transaction, ProductsInTransaction, BuyerSellerAccount, dueTransaction
from shop_inventory.models import Inventory, BuySellProfitInventoryIndividual, BuySellProfitInventory
from transcriber_management.models import Transcriber, TranscriberInTranscription, FailedTranscription
import datetime
from django.db.models import Q
from django.contrib.auth.models import User
from django.contrib.sessions.backends.db import SessionStore
from django.db.models import Count
@csrf_exempt
def login_page(request):
return render(request, 'pages/login.html')
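# Accepts either a full username or, for subscriber ACL accounts, a phone
# number whose last 9 digits are stored as the login ID.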
@csrf_exempt
def login_auth(request):
postdata = request.POST
print(postdata)
if 'username' in postdata and 'password' in postdata:
print(postdata['username'])
login_username = postdata['username']
print(postdata['password'])
if ACL.objects.filter(loginID=postdata['username'][-9:]).exists():
login_username = login_username[-9:]
user = authenticate(username=login_username, password=postdata['password'])
if user is not None:
if user.is_active:
login(request, user)
request.session['user'] = login_username
if user.is_superuser:
res = redirect('/admin')
else:
res = redirect('/')
else:
res = render(request, 'pages/login.html',
{'wrong': True,
'text': 'The password is valid, but the account has been disabled!'})
else:
res = render(request, 'pages/login.html',
{'wrong': True,
'text': 'The username and password you have entered is not correct. Please retry'})
else:
res = render(request, 'pages/login.html', {'wrong': False})
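# Open up CORS so external clients (e.g. a mobile front-end) can post credentials.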
res['Access-Control-Allow-Origin'] = "*"
res['Access-Control-Allow-Headers'] = "Origin, X-Requested-With, Content-Type, Accept"
res['Access-Control-Allow-Methods'] = "PUT, GET, POST, DELETE, OPTIONS"
return res
def logout_now(request):
logout(request)
return render(request, 'pages/login.html')
@login_required(login_url='/login/')
def home(request):
transcriber_name = request.session['user']
print(request.session['user'])
if ACL.objects.filter(loginID=transcriber_name).exists():
login_user = ACL.objects.get(loginID=transcriber_name)
print(login_user.loginUser.name)
transcriber_name = login_user.loginUser.name
if login_user.loginUser.type.type_name == 'Distributor':
if login_user.loginUser.number_of_child == 'CHANGED !!!':
return render(request, 'pages/Distributor/index.html', {'transcriber_name': transcriber_name})
else:
return redirect('/change_password/')
elif login_user.loginUser.type.type_name == 'SR':
if login_user.loginUser.number_of_child == 'CHANGED !!!':
return render(request, 'pages/SR/index.html', {'transcriber_name': transcriber_name})
else:
return redirect('/change_password/')
elif login_user.loginUser.type.type_name == 'Seller':
if login_user.loginUser.number_of_child == 'CHANGED !!!':
return render(request, 'pages/Shop/index.html', {'transcriber_name': transcriber_name})
else:
return redirect('/change_password/')
elif login_user.loginUser.type.type_name == 'Buyer':
if login_user.loginUser.number_of_child == 'CHANGED !!!':
return render(request, 'pages/Consumer/index.html', {'transcriber_name': transcriber_name})
else:
return redirect('/change_password/')
else:
number_of_reg_calls = VoiceReg.objects.filter().count()
number_of_transaction_calls = VoiceRecord.objects.filter().count()
total = number_of_reg_calls + number_of_transaction_calls
if total > 0:
reg_call_percentage = (number_of_reg_calls / float(total)) * 100
transaction_call_percentage = (number_of_transaction_calls / float(total)) * 100
else:
transaction_call_percentage = 0
reg_call_percentage = 0
today_month = datetime.date.today().month
today_year = datetime.date.today().year
count = 1
data_2 = ''
data_3 = ''
data_4 = ''
data_5 = ''
data_6 = ''
max = 0
max_table_2 = 0
total_sell = VoiceRecord.objects.filter(purpose='sell').count()
total_buy = VoiceRecord.objects.filter(purpose='buy').count()
total_money_transaction = SMSPayment.objects.filter().count()
# chart 2 shows sell/buy/payment shares, so its denominator should be the sum of those three
total_for_chart2 = total_sell + total_buy + total_money_transaction
if total_for_chart2 > 0:
sell_percentage = (total_sell / float(total_for_chart2)) * 100
buy_percentage = (total_buy / float(total_for_chart2)) * 100
money_transaction_percentage = (total_money_transaction / float(total_for_chart2)) * 100
else:
sell_percentage = 0
buy_percentage = 0
money_transaction_percentage = 0
while count < 32:
total_call_that_day = VoiceRecord.objects.filter(DateAdded__month=today_month,
DateAdded__year=today_year, DateAdded__day=count).count()
total_reg_that_day = VoiceReg.objects.filter(DateAdded__month=today_month,
DateAdded__year=today_year, DateAdded__day=count).count()
if max < total_call_that_day:
max = total_call_that_day + 2
if max < total_reg_that_day:
max = total_reg_that_day + 2
data_2 += '[gd(%s, %s, %s), %s],' % (today_year, today_month, count, total_call_that_day)
data_3 += '[gd(%s, %s, %s), %s],' % (today_year, today_month, count, total_reg_that_day)
total_buy_that_day = VoiceRecord.objects.filter(DateAdded__month=today_month,
DateAdded__year=today_year,
DateAdded__day=count,
purpose='buy').count()
total_sell_that_day = VoiceRecord.objects.filter(DateAdded__month=today_month,
DateAdded__year=today_year,
DateAdded__day=count,
purpose='sell').count()
total_payment_that_day = SMSPayment.objects.filter(DateAdded__month=today_month,
DateAdded__year=today_year,
DateAdded__day=count).count()
if max_table_2 < total_buy_that_day:
max_table_2 = total_buy_that_day + 2
if max_table_2 < total_sell_that_day:
max_table_2 = total_sell_that_day + 2
if max_table_2 < total_payment_that_day:
max_table_2 = total_payment_that_day + 2
data_4 += '[gd(%s, %s, %s), %s],' % (today_year, today_month, count, total_buy_that_day)
data_5 += '[gd(%s, %s, %s), %s],' % (today_year, today_month, count, total_sell_that_day)
data_6 += '[gd(%s, %s, %s), %s],' % (today_year, today_month, count, total_payment_that_day)
count += 1
data_2 = data_2[:-1]
data_3 = data_3[:-1]
data_4 = data_4[:-1]
data_5 = data_5[:-1]
data_6 = data_6[:-1]
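# NOTE: 'gd(year, month, day)' in the strings built above is assumed to be a
# JavaScript date helper defined in the chart template (e.g. wrapping
# `new Date(y, m - 1, d)`); the trailing comma is trimmed so each string can be
# inlined directly into a JS array literal.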
number_of_transactions = Transaction.objects.filter().count()
number_of_transactions_with_due = Transaction.objects.filter(total_due__gt=0).count()
number_of_transactions_without_due = Transaction.objects.filter(total_due__lte=0).count()
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
print(all_consumer_for_base.count())
return render(request, 'pages/index.html', {'shop_list_base': all_shop_for_base,
'number_of_reg_calls': number_of_reg_calls,
'transcriber_name': transcriber_name,
'number_of_transaction_calls': number_of_transaction_calls,
'all_consumer_for_base' :all_consumer_for_base,
'reg_call_percentage': reg_call_percentage,
'transaction_call_percentage': transaction_call_percentage,
'data_2': data_2,
'data_3': data_3,
'data_4': data_4,
'data_5': data_5,
'data_6': data_6,
'max': max,
'number_of_transactions': number_of_transactions,
'number_of_transactions_with_due': number_of_transactions_with_due,
'number_of_transactions_without_due': number_of_transactions_without_due,
'max_table_2': max_table_2,
'total_sell': total_sell,
'total_buy': total_buy,
'total_money_transaction': total_money_transaction,
'sell_percentage': sell_percentage,
'buy_percentage': buy_percentage,
'money_transaction_percentage': money_transaction_percentage,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def translator_page(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/translator.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
# report views start here
@login_required(login_url='/login/')
def report_monthly_shop(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
shop_id = shop_object.id
total_sell = 0
total_sell_due = 0
total_sell_paid = 0
total_purchase = 0
total_purchase_due = 0
total_purchase_paid = 0
for month_sell in BuyerSellerAccount.objects.filter(seller=shop_object):
total_sell += month_sell.total_amount_of_transaction
total_sell_due += month_sell.total_due
total_sell_paid += month_sell.total_paid
for month_purchase in BuyerSellerAccount.objects.filter(buyer=shop_object):
total_purchase += month_purchase.total_amount_of_transaction
total_purchase_due += month_purchase.total_due
total_purchase_paid += month_purchase.total_paid
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_monthly_shop.html', {'shop_list_base': all_shop_for_base,
'shop_name': shop_name,
'shop_id': shop_id,
'all_consumer_for_base' :all_consumer_for_base,
'total_sell': total_sell,
'transcriber_name': transcriber_name,
'total_sell_due': total_sell_due,
'total_sell_paid': total_sell_paid,
'bangla': bangla,
'total_purchase': total_purchase,
'total_purchase_due': total_purchase_due,
'total_purchase_paid': total_purchase_paid,
'all_user_for_base': all_user_for_base})
# report_monthly_shop_json
@login_required(login_url='/login/')
def report_monthly_shop_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
shop_inventory = BuySellProfitInventoryIndividual.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
output = '{"data": [ '
if get_data['t'] == '1':
rank = 1
this_year = datetime.date.today().year
for this_month in range(1, 13, 1):
count = 0
for this_day in range(1, 32, 1):
# daily totals are shop-wide aggregates, so no per-product loop is needed here
# (the old product loop emitted the same row once per product)
total_sell = 0
total_due = 0
total_paid = 0
for this_day_sell_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year, DateAdded__month=this_month, DateAdded__day=this_day):
total_sell += this_day_sell_transaction.total_amount
total_due += this_day_sell_transaction.total_due
total_paid += this_day_sell_transaction.total_paid
count += 1
total_purchase = 0
total_purchase_due = 0
total_purchase_paid = 0
for this_day_purchase_transaction in Transaction.objects.filter(buyer=shop_object, DateAdded__year=this_year, DateAdded__month=this_month, DateAdded__day=this_day):
total_purchase += this_day_purchase_transaction.total_amount
total_purchase_due += this_day_purchase_transaction.total_due
total_purchase_paid += this_day_purchase_transaction.total_paid
count += 1
if count > 0:
output += '["%s/%s/%s","%s","%s","%s","%s","%s","%s"] ,' % (this_day, this_month, this_year, total_sell, total_paid, total_due, total_purchase, total_purchase_paid, total_purchase_due)
count = 0
if get_data['t'] == '2':
for this_day_transaction in Transaction.objects.filter(Q(seller=shop_object) | Q(buyer=shop_object)):
# one output row per transaction involving this shop
id = this_day_transaction.pk
date = this_day_transaction.DateAdded
if this_day_transaction.seller == shop_object:
with_trade = this_day_transaction.buyer
trade_type = 'Sell'
elif this_day_transaction.buyer == shop_object:
with_trade = this_day_transaction.seller
trade_type = 'Buy'
number_of_items = ProductsInTransaction.objects.filter(TID=this_day_transaction).count()
total_amount = this_day_transaction.total_amount
total_paid = this_day_transaction.total_paid
total_due = this_day_transaction.total_due
output += '["%s","%s","%s","%s","%s","%s","%s","%s"] ,' % (id, date, with_trade, trade_type,
number_of_items, total_amount,
total_paid, total_due)
output = output[:-1]
output += ']}'
return HttpResponse(output, content_type="text/plain")
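# Sketch of a less fragile way to build these table payloads: collect rows as
# Python lists and let the json module handle quoting, escaping, and commas.
# The helper name `datatable_response` is illustrative, not an existing function
# in this codebase.
import json

def datatable_response(rows):
    # rows: list of row-lists, e.g. [["1/1/2016", "100", "80", "20", "0", "0", "0"], ...]
    return HttpResponse(json.dumps({'data': rows}), content_type='application/json')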
@login_required(login_url='/login/')
def report_sales_analysis(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
shop_id = shop_object.id
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
if 'ban' in get_data:
bangla = True
else:
bangla = False
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_sales_analysis.html', {'shop_list_base': all_shop_for_base,
'shop_name': shop_name,
'all_consumer_for_base' :all_consumer_for_base,
'shop_id': shop_id,
'bangla': bangla,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_sales_analysis_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
shop_inventory = BuySellProfitInventoryIndividual.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
output = '{"data": [ '
if get_data['t'] == '1':
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
output += '["%s","%s","%s"] ,' % (rank, product_name, str(count) + ' ' + a_product.retail_unit)
rank += 1
if get_data['t'] == '2':
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
previous_product_price = 0
change = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if count == 0:
previous_product_price = product_in_this_transaction.price_per_unit
product_price = product_in_this_transaction.price_per_unit
change += abs(previous_product_price - product_price)
previous_product_price = product_price  # track consecutive price movements, not drift from the first price
count += 1
if count > 0:
output += '["%s","%s","%s","%s"] ,' % (rank, product_name, count,
change/count)
rank += 1
if get_data['t'] == '3':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
day = 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while day < 32:
day_string = True
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year,
DateAdded__month=this_month, DateAdded__day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
if day_string:
output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (rank, product_name,
str(count) + ' ' + a_product.retail_unit,
float(product_price / count))
rank += 1
day += 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
if get_data['t'] == '4':
day = 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while day < 8:
day_string = True
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__week_day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
if day_string:
# Django's __week_day lookup uses 1 = Sunday ... 7 = Saturday
output += '["%s","","","",""] ,' % ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][day - 1]
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (rank, product_name,
str(count) + ' ' + a_product.retail_unit,
float(product_price / count))
rank += 1
day += 1
if get_data['t'] == '5':
this_year = datetime.date.today().year
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
output += '["%s","%s"] ,' % (product_name, product_profit)
output = output[:-1]
output += ']}'
return HttpResponse(output, content_type="text/plain")
@login_required(login_url='/login/')
def report_payment(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
sell_transaction_with_due = Transaction.objects.filter(seller_id=shop_object, total_due__lte=0)
buy_transaction_with_due = Transaction.objects.filter(buyer_id=shop_object, total_due__lte=0)
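# NOTE: despite the "_with_due" names, these querysets hold settled transactions
# (total_due <= 0); the names are kept because the template keys depend on them.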
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
buyer_account = BuyerSellerAccount.objects.filter(seller=shop_object, total_due__lte=0)
seller_account = BuyerSellerAccount.objects.filter(buyer=shop_object, total_due__lte=0)
all_user_for_base = Consumer.objects.all()
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
transcriber_name = request.session['user']
return render(request, 'pages/report_payment.html', {'shop_list_base': all_shop_for_base,
'sell_transaction_with_due': sell_transaction_with_due,
'buy_transaction_with_due': buy_transaction_with_due,
'all_consumer_for_base' :all_consumer_for_base,
'buyer_account': buyer_account,
'transcriber_name': transcriber_name,
'seller_account': seller_account,
'shop_name': shop_name,
'bangla': bangla,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_due(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
sell_transaction_with_due = Transaction.objects.filter(seller_id=shop_object, total_due__gt=0)
buy_transaction_with_due = Transaction.objects.filter(buyer_id=shop_object, total_due__gt=0)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
buyer_account = SMSPayment.objects.filter(seller=shop_object)
seller_account = SMSPayment.objects.filter(buyer=shop_object)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_due.html', {'shop_list_base': all_shop_for_base,
'sell_transaction_with_due': sell_transaction_with_due,
'buy_transaction_with_due': buy_transaction_with_due,
'buyer_account': buyer_account,
'all_consumer_for_base' :all_consumer_for_base,
'bangla': bangla,
'seller_account': seller_account,
'transcriber_name': transcriber_name,
'shop_name': shop_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_profit(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
shop_id = shop_object.id
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_profit.html', {'shop_list_base': all_shop_for_base,
'shop_name': shop_name,
'shop_id': shop_id,
'all_consumer_for_base' :all_consumer_for_base,
'bangla': bangla,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_profit_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
shop_inventory = BuySellProfitInventoryIndividual.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
output = '{"data": [ '
if get_data['t'] == '1':
this_year = datetime.date.today().year
this_month = 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while this_month < 13:
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object,
DateAdded__year=this_year,
DateAdded__month=this_month):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
if count > 0:
if day_string:
output += '["%s","",""], ' % calendar.month_name[this_month]
day_string = False
output += '["","%s","%s"] ,' % (product_name, product_profit)
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
this_month += 1
if get_data['t'] == '2':
this_year = datetime.date.today().year
this_month = 1
while this_month < 13:
day = 1
while day < 32:
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object,
DateAdded__year=this_year,
DateAdded__month=this_month,
DateAdded__day=day):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
if count > 0:
if day_string:
output += '["%s/%s/%s","",""] ,' % (day, this_month, this_year)
day_string = False
output += '["","%s","%s"] ,' % (product_name, product_profit)
day += 1
this_month += 1
if get_data['t'] == '3':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object,
DateAdded__year=this_year,
DateAdded__month=this_month):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
output += '["%s","%s"] ,' % (product_name, product_profit)
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
if get_data['t'] == '4':
this_year = datetime.date.today().year
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object,
DateAdded__year=this_year):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
output += '["%s","%s"] ,' % (product_name, product_profit)
if get_data['t'] == '5':
this_year = datetime.date.today().year
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop_id=shop_object):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
output += '["%s","%s"] ,' % (product_name, product_profit)
output = output[:-1]
output += ']}'
return HttpResponse(output, content_type="text/plain")
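# Sketch: the per-product Python loops above can be collapsed into a single
# aggregated query with the ORM. Field names follow the models used above;
# treat this as an illustrative alternative, not a drop-in replacement.
from django.db.models import Sum

def profit_rows(shop_object, **date_filters):
    # e.g. profit_rows(shop, DateAdded__year=2016, DateAdded__month=5)
    qs = (BuySellProfitInventoryIndividual.objects
          .filter(shop=shop_object, **date_filters)
          .values('product__name')
          .annotate(total_profit=Sum('profit')))
    return [[row['product__name'], str(row['total_profit'])] for row in qs]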
@login_required(login_url='/login/')
def report_product(request):
get_data = request.GET
shop_name = get_data['shop']
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_object = Consumer.objects.get(name=shop_name)
shop_id = shop_object.id
shop_inventory = Inventory.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
selected_products = ProductsInTransaction.objects.filter(TID__in=Transaction.objects.filter(seller=shop_object))
selected_products_buy = ProductsInTransaction.objects.filter(TID__in=Transaction.objects.filter(buyer=shop_object))
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_product.html', {'shop_list_base': all_shop_for_base,
'shop_inventory': shop_inventory,
'shop_name': shop_name,
'shop_id': shop_id,
'bangla': bangla,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'selected_products_buy': selected_products_buy,
'selected_products': selected_products,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_product_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
shop_inventory = Inventory.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
output = '{"data": [ '
if get_data['t'] == '1':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
day = 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while day < 32:
day_string = True
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year,
DateAdded__month=this_month, DateAdded__day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit * product_in_this_transaction.quantity
if count > 0:
if day_string:
output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (product_name, count,
a_product.retail_unit,
float(product_price / count))
day += 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
if get_data['t'] == '2':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
day = 1
while day < 32:
day_string = True
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(buyer=shop_object, DateAdded__year=this_year,
DateAdded__month=this_month, DateAdded__day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
# all unit branches accumulated identically, so the unit check is unnecessary
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
if day_string:
output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (product_name, count,
a_product.bulk_wholesale_unit,
float(product_price / count))
day += 1
if get_data['t'] == '3':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year, DateAdded__month=this_month):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
output += '["%s","%s","%s","%s"] ,' % (product_name, count,
a_product.retail_unit,
float(product_price / count))
if get_data['t'] == '4':
this_year = datetime.date.today().year
this_month = datetime.date.today().month
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(buyer=shop_object, DateAdded__year=this_year, DateAdded__month=this_month):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
output += '["%s","%s","%s","%s"] ,' % (product_name, count,
a_product.retail_unit,
float(product_price / count))
output = output[:-1]
output += ']}'
return HttpResponse(output, content_type="text/plain")
# report_analytical renders its own template (reports_analytical.html) rather than report_product's
@login_required(login_url='/login/')
def report_analytical(request):
all_product = Product.objects.all()
final_output = ''
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
shop_id = shop_object.id
for product in all_product:
print(product.name)
if ProductsInTransaction.objects.filter(product=product).exists():
product_output = "[%s, " % product.name
sold_amount = 0
for product_details in ProductsInTransaction.objects.filter(product=product):
sold_amount = sold_amount + product_details.quantity
product_output += str(sold_amount)
final_output += product_output
final_output += "] ,"
print(final_output)
final_output = final_output[:-1]
print(final_output)
add_notification = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/reports_analytical.html',
{'all_product': all_product, 'add_notification': add_notification,
'shop_list_base': all_shop_for_base, 'product_sell': final_output,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'shop_name': shop_name,
'shop_id': shop_id,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_analytical_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
if get_data['t'] == '1':
all_product = Product.objects.all()
final_output = '{"cols": [ { "id": "", "label": "Topping", "pattern": "", "type": "string" }, ' \
'{ "id": "", "label": "Units", "pattern": "", "type": "number" } ], "rows": [ '
for product in all_product:
print(product.name)
if ProductsInTransaction.objects.filter(product=product).exists():
product_name = product.name
sold_amount = 0
for transaction_id in Transaction.objects.filter(seller=shop_object):
for product_details in ProductsInTransaction.objects.filter(product=product, TID=transaction_id):
sold_amount = sold_amount + product_details.quantity
final_output += '{"c": [{"v": "%s","f": null},{"v": %s,"f": null}]},' % (product_name,
sold_amount)
final_output = final_output[:-1]
print(final_output)
if get_data['t'] == '2':
all_product = BuySellProfitInventory.objects.filter(shop=shop_object)
final_output = '{"cols": [ { "id": "", "label": "Topping", "pattern": "", "type": "string" }, ' \
'{ "id": "", "label": "Profit", "pattern": "", "type": "number" } ], "rows": [ '
for product in all_product:
final_output += '{"c": [{"v": "%s","f": null},{"v": %s,"f": null}]},' % (product.product,
product.profit)
final_output = final_output[:-1]
print(final_output)
final_output += ']}'
print(final_output)
return HttpResponse(final_output, content_type="text/plain")
# views up to this point were created from the emailed requirements list
@login_required(login_url='/login/')
def report_recharge(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_recharge.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_callduration(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_callduration_graph.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
# not necessary
@login_required(login_url='/login/')
def report_transaction(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_transaction.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_calltranscription(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_transcription.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_usercall(request):
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_user_call_recharge.html', {'shop_list_base': all_shop_for_base,
'transcriber_name': transcriber_name,
'all_consumer_for_base' :all_consumer_for_base,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def transcription_page(request):
print(request.POST)
number_of_pending_calls = VoiceRecord.objects.filter(transcribed=False).count()
number_of_pending_reg_calls = VoiceReg.objects.filter(completed=False).count()
type_of_subscriber = ConsumerType.objects.all()
number_of_fail_calls = VoiceRecord.objects.filter(with_error=True).count()
number_of_completed_calls = VoiceRecord.objects.filter(with_error=False, transcribed=True).count()
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
return render(request, 'pages/transcription.html',
dict(pending_calls=number_of_pending_calls, types=type_of_subscriber,
pending_calls_reg=number_of_pending_reg_calls, number_of_fail_calls=str(number_of_fail_calls),
number_of_completed_calls=number_of_completed_calls, transcriber_name=transcriber_name,
shop_list_base=all_shop_for_base,all_consumer_for_base=all_consumer_for_base,
all_user_for_base=all_user_for_base))
# report views end here
@login_required(login_url='/login/')
def add_subscriber_page(request):
all_subscriber = Consumer.objects.all()
type_of_subscriber = ConsumerType.objects.all()
add_notification = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
notification = ''
if 'delete' in request.GET:
get_data = request.GET
add_notification = True
delID = get_data['delete']
if Consumer.objects.filter(id=delID).exists():
item_for_delete = Consumer.objects.get(id=delID)
notification = 'Daily statement for the user ' + item_for_delete.name + ' has been sent successfully.'
# item_for_delete.delete()
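# NOTE: despite the 'delete' query parameter, this branch only sends the daily
# statement SMS; the actual deletion above is intentionally left disabled.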
sales_statement = ''
purchase_statement = ''
today_date = datetime.date.today()
today_day = today_date.day
today_month = today_date.month
today_year = today_date.year
# for selling
sell_transactions = Transaction.objects.filter(seller=item_for_delete, DateAdded__day=today_day,
DateAdded__month=today_month, DateAdded__year=today_year)
total_sales = 0
total_due = 0
total_paid = 0
for sell_transaction in sell_transactions:
total_sales += sell_transaction.total_amount
total_paid += sell_transaction.total_paid
total_due += sell_transaction.total_due
if total_sales > 0:
sales_statement = ' bikroy korechen mot: ' + str(total_sales) + ' takar. nogod peyechen : ' + \
str(total_paid) + ' taka ebong baki royeche ' + str(total_due) + ' taka.'
buy_transactions = Transaction.objects.filter(buyer=item_for_delete, DateAdded__day=today_day,
DateAdded__month=today_month, DateAdded__year=today_year)
total_purchase = 0
total_due = 0
total_paid = 0
for buy_transaction in buy_transactions:
total_purchase += buy_transaction.total_amount
total_paid += buy_transaction.total_paid
total_due += buy_transaction.total_due
if total_purchase > 0:
purchase_statement = ' kinechen mot: ' + str(total_purchase) + ' takar. Nogod diyechen : ' + \
str(total_paid) + ' taka ebong baki royeche ' + str(total_due) + ' taka.'
final_text = 'Aj apni' + sales_statement + purchase_statement + ' Dhonnobad'
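# Rough English gloss of the romanized-Bangla SMS assembled above: "Today you
# sold a total of X taka (cash received Y, due Z) / purchased a total of X taka
# (cash paid Y, due Z). Thank you."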
if total_purchase > 0 or total_sales > 0:
print(final_text)
send_sms(final_text, item_for_delete.phone)
else:
notification = 'Item not found'
return render(request, 'pages/add_subscriber.html',
{'subscribers': all_subscriber, 'types': type_of_subscriber, 'add_notification': add_notification,
'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'notification':notification,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def add_product_page(request):
all_product = Product.objects.all()
add_notification = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
notification = ''
if 'delete' in request.GET:
get_data = request.GET
add_notification = True
delID = get_data['delete']
if Product.objects.filter(id=delID).exists():
item_for_delete = Product.objects.get(id=delID)
notification = 'The product ' + item_for_delete.name + ' has been deleted successfully.'
item_for_delete.delete()
else:
notification = 'Item not found'
return render(request, 'pages/add_product.html',
{'all_product': all_product, 'add_notification': add_notification,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,'notification': notification,
'shop_list_base': all_shop_for_base, 'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_transcriber_performance(request):
all_product = Product.objects.all()
add_notification = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_transcriber_performance.html',
{'all_product': all_product, 'add_notification': add_notification,
'transcriber_name': transcriber_name,
'all_consumer_for_base' :all_consumer_for_base,
'shop_list_base': all_shop_for_base, 'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_transcriber_performance_json(request):
final_output = '{"data": [ '
for transcriber in Transcriber.objects.all():
number_of_transcriptions = TranscriberInTranscription.objects.filter(name=transcriber).count()
total_time_taken = 0
total_product_trancribed = 0
for transcriber_in_transaction in TranscriberInTranscription.objects.filter(name=transcriber):
total_time_taken += float(transcriber_in_transaction.time_taken)
total_product_trancribed += transcriber_in_transaction.number_of_products
if number_of_transcriptions > 0:
avg_time = total_time_taken / number_of_transcriptions
avg_product = total_product_trancribed / number_of_transcriptions
final_output += '["%s","%s","%s","%s","%s"] ,' % (transcriber.id, transcriber.name,
number_of_transcriptions, avg_time, avg_product)
final_output = final_output[:-1]
final_output += ']}'
return HttpResponse(final_output, content_type="text/plain")
@login_required(login_url='/login/')
def user_balance_recharge(request):
post_data = request.POST
notification = ''
for all_consumers in Consumer.objects.all():
if Recharge.objects.filter(user=all_consumers).exists():
print('Already_Added')
else:
new_added = Recharge(user=all_consumers)
new_added.save()
if TotalRecharge.objects.filter(user=all_consumers).exists():
print('Already_Added')
else:
new_added = TotalRecharge(user=all_consumers)
new_added.save()
add_notification = False
if 'user' in post_data and 'recharge_amount' in post_data:
user_name = post_data['user']
user_object = Consumer.objects.get(name=user_name)
if is_number(post_data['recharge_amount']) or is_float(post_data['recharge_amount']):
new_recharge_added = Recharge(user=user_object, amount=float(post_data['recharge_amount']))
new_recharge_added.save()
new_recharge_update = TotalRecharge.objects.get(user=user_object)
new_recharge_update.amount += float(post_data['recharge_amount'])
new_recharge_update.save()
add_notification = True
notification = 'Amount %s has been added to the number %s' %(post_data['recharge_amount'],
user_object.phone)
else:
notification = 'Invalid recharge amount. Please enter a numeric value.'
recharge_all = TotalRecharge.objects.all()
today_date = datetime.date.today()
today_day = today_date.day
today_month = today_date.month
today_year = today_date.year
recharge_today = Recharge.objects.filter(DateAdded__day=today_day,
DateAdded__month=today_month, DateAdded__year=today_year, amount__gt=0)
all_product = Product.objects.all()
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'pages/report_user_call_recharge.html',
{'all_product': all_product, 'add_notification': add_notification,
'all_consumer_for_base' :all_consumer_for_base,
'shop_list_base': all_shop_for_base, 'recharge_all': recharge_all,
'transcriber_name': transcriber_name,
'recharge_today': recharge_today, 'all_user_for_base': all_user_for_base,
'notification': notification})
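# Sketch: the read-modify-write on TotalRecharge above can lose updates under
# concurrent requests; Django's F() expressions make the increment atomic at the
# database level. `add_recharge_atomically` is an illustrative helper, not part
# of this module.
from django.db.models import F

def add_recharge_atomically(user_object, amount):
    TotalRecharge.objects.filter(user=user_object).update(amount=F('amount') + amount)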
# views for printing
@login_required(login_url='/login/')
def report_monthly_shop_print(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
total_sell = 0
total_sell_due = 0
total_sell_paid = 0
total_purchase = 0
total_purchase_due = 0
total_purchase_paid = 0
for month_sell in BuyerSellerAccount.objects.filter(seller=shop_object):
total_sell += month_sell.total_amount_of_transaction
total_sell_due += month_sell.total_due
total_sell_paid += month_sell.total_paid
for month_purchase in BuyerSellerAccount.objects.filter(buyer=shop_object):
total_purchase += month_purchase.total_amount_of_transaction
total_purchase_due += month_purchase.total_due
total_purchase_paid += month_purchase.total_paid
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
transcriber_name = request.session['user']
return render(request, 'print/report_monthly_shop.html', {'shop_list_base': all_shop_for_base,
'shop_name': shop_name,
'all_consumer_for_base' :all_consumer_for_base,
'total_sell': total_sell,
'transcriber_name': transcriber_name,
'total_sell_due': total_sell_due,
'total_sell_paid': total_sell_paid,
'bangla': bangla,
'total_purchase': total_purchase,
'total_purchase_due': total_purchase_due,
'total_purchase_paid': total_purchase_paid,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_due_print(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
sell_transaction_with_due = Transaction.objects.filter(seller_id=shop_object, total_due__gt=0)
buy_transaction_with_due = Transaction.objects.filter(buyer_id=shop_object, total_due__gt=0)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
buyer_account = BuyerSellerAccount.objects.filter(seller=shop_object, total_due__gt=0)
seller_account = BuyerSellerAccount.objects.filter(buyer=shop_object, total_due__gt=0)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'print/report_due.html', {'shop_list_base': all_shop_for_base,
'sell_transaction_with_due': sell_transaction_with_due,
'buy_transaction_with_due': buy_transaction_with_due,
'buyer_account': buyer_account,
'all_consumer_for_base' :all_consumer_for_base,
'bangla': bangla,
'seller_account': seller_account,
'transcriber_name': transcriber_name,
'shop_name': shop_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_payment_print(request):
get_data = request.GET
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_name = get_data['shop']
shop_object = Consumer.objects.get(name=shop_name)
sell_transaction_with_due = Transaction.objects.filter(seller_id=shop_object, total_due__lte=0)
buy_transaction_with_due = Transaction.objects.filter(buyer_id=shop_object, total_due__lte=0)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
buyer_account = BuyerSellerAccount.objects.filter(seller=shop_object, total_due__lte=0)
seller_account = BuyerSellerAccount.objects.filter(buyer=shop_object, total_due__lte=0)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'print/report_payment.html', {'shop_list_base': all_shop_for_base,
'sell_transaction_with_due': sell_transaction_with_due,
'buy_transaction_with_due': buy_transaction_with_due,
'all_consumer_for_base' :all_consumer_for_base,
'buyer_account': buyer_account,
'transcriber_name': transcriber_name,
'seller_account': seller_account,
'shop_name': shop_name,
'bangla': bangla,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_product_print(request):
get_data = request.GET
shop_name = get_data['shop']
if 'ban' in get_data:
bangla = True
else:
bangla = False
shop_object = Consumer.objects.get(name=shop_name)
shop_inventory = Inventory.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
selected_products = ProductsInTransaction.objects.filter(TID__in=Transaction.objects.filter(seller=shop_object))
selected_products_buy = ProductsInTransaction.objects.filter(TID__in=Transaction.objects.filter(buyer=shop_object))
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'print/report_product.html', {'shop_list_base': all_shop_for_base,
'shop_inventory': shop_inventory,
'shop_name': shop_name,
'bangla': bangla,
'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'selected_products_buy': selected_products_buy,
'selected_products': selected_products,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_sales_analysis_print(request):
get_data = request.GET
shop_name = get_data['shop']
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
transcriber_name = request.session['user']
return render(request, 'print/report_sales_analysis.html', {'shop_list_base': all_shop_for_base,
'all_consumer_for_base' :all_consumer_for_base,
'shop_name': shop_name,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_profit_print(request):
get_data = request.GET
shop_name = get_data['shop']
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'print/report_profit.html', {'shop_list_base': all_shop_for_base,
'shop_name': shop_name,
'all_consumer_for_base':all_consumer_for_base,
'transcriber_name': transcriber_name,
'all_user_for_base': all_user_for_base})
@login_required(login_url='/login/')
def report_transcriber_performance_print(request):
all_product = Product.objects.all()
add_notification = False
shop_consumer = ConsumerType.objects.get(type_name='Seller')
all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
all_user_for_base = Consumer.objects.all()
transcriber_name = request.session['user']
shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
return render(request, 'print/report_transcriber_performance.html',
{'all_product': all_product, 'add_notification': add_notification,
'all_consumer_for_base':all_consumer_for_base,
'transcriber_name': transcriber_name,
'shop_list_base': all_shop_for_base, 'all_user_for_base': all_user_for_base})
# SR section
@login_required(login_url='/login/')
def sr_monthly_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
return render(request, 'pages/SR/report_monthly.html', {'transcriber_name': transcriber_name,
'allTransaction': allTransaction})
@login_required(login_url='/login/')
def sr_due_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
allBalance = BuyerSellerAccount.objects.filter(seller=sr_object)
sell_transaction = Transaction.objects.filter(seller=sr_object)
dueTransactions = dueTransaction.objects.filter(seller=sr_object)
return render(request, 'pages/SR/report_due.html', {'transcriber_name': transcriber_name,
'sell_transaction': sell_transaction,
'dueTransactions': dueTransactions,
'allBalance': allBalance})
@login_required(login_url='/login/')
def sr_report_sales_analysis(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
post_data = request.POST
print(post_data)
shop_object = sr_object
shop_name = shop_object.name
shop_id = shop_object.id
if 'month' in post_data and 'year' in post_data:
month = post_data['month']
year = post_data['year']
else:
month = datetime.date.today().month
year = datetime.date.today().year
return render(request, 'pages/SR/report_sales_analysis.html', {'shop_name': shop_name,
# 'all_consumer_for_base' :all_consumer_for_base,
'shop_id': shop_id,
# 'bangla': bangla,
'transcriber_name': transcriber_name,
'month': month,
'year': year})
@login_required(login_url='/login/')
def sr_report_sales_analysis_json(request):
get_data = request.GET
shop_name = get_data['shop']
shop_object = Consumer.objects.get(id=shop_name)
shop_inventory = BuySellProfitInventoryIndividual.objects.filter(shop=shop_object)
shop_consumer = ConsumerType.objects.get(type_name='Seller')
this_year = get_data['year']
print(this_year)
this_month = get_data['month']
output = '{"data": [ '
if get_data['t'] == '1':
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year,
DateAdded__month=this_month):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
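# Normalize to retail units: bulk quantities are multiplied by bulk_to_retail_unit (0 means no conversion is defined) and the bulk price is divided by the same factor.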
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
output += '["%s","%s","%s"] ,' % (rank, product_name, str(count) + ' ' + a_product.retail_unit)
rank += 1
if get_data['t'] == '2':
rank = 1
for a_product in Product.objects.all():
count = 0
# product_price = 0
previous_product_price = 0
change = 0
product_name = a_product.name
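# 'change' accumulates the absolute deviation of each observed price from the first price seen for this product; the average deviation per observation is reported below.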
for this_day_transaction in Transaction.objects.filter(seller=shop_object):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if count == 0:
previous_product_price = product_in_this_transaction.price_per_unit
product_price = product_in_this_transaction.price_per_unit
change += abs(previous_product_price - product_price)
count += 1
if count > 0:
output += '["%s","%s","%s","%s"] ,' % (rank, product_name, count,
change/count)
rank += 1
if get_data['t'] == '3':
print(this_month)
day = 1
#
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while day < 32:
day_string = True
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__year=this_year,
DateAdded__month=this_month, DateAdded__day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
if day_string:
output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (rank, product_name,
str(count) + ' ' + a_product.retail_unit,
float(product_price / count))
rank += 1
day += 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
if get_data['t'] == '4':
day = 1
# output += '["%s/%s/%s","","","",""] ,' % (day, this_month, this_year)
while day < 8:
day_string = True
rank = 1
for a_product in Product.objects.all():
count = 0
product_price = 0
product_name = a_product.name
for this_day_transaction in Transaction.objects.filter(seller=shop_object, DateAdded__week_day=day):
# start counting for this product
for product_in_this_transaction in ProductsInTransaction.objects.filter(TID=this_day_transaction):
if product_in_this_transaction.product == a_product:
if product_in_this_transaction.unit == a_product.bulk_wholesale_unit:
if a_product.bulk_to_retail_unit == 0:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
else:
count = count + product_in_this_transaction.quantity * a_product.bulk_to_retail_unit
product_price = product_price + product_in_this_transaction.price_per_unit / a_product.bulk_to_retail_unit
else:
count = count + product_in_this_transaction.quantity
product_price = product_price + product_in_this_transaction.price_per_unit
if count > 0:
if day_string:
if day == 1:
output += '["%s","","","",""] ,' % 'Sunday'
elif day == 2:
output += '["%s","","","",""] ,' % 'Monday'
elif day == 3:
output += '["%s","","","",""] ,' % 'Tuesday'
elif day == 4:
output += '["%s","","","",""] ,' % 'Wednesday'
elif day == 5:
output += '["%s","","","",""] ,' % 'Thursday'
elif day == 6:
output += '["%s","","","",""] ,' % 'Friday'
elif day == 7:
output += '["%s","","","",""] ,' % 'Saturday'
day_string = False
output += '["","%s","%s","%s","%s"] ,' % (rank, product_name,
str(count) + ' ' + a_product.retail_unit,
float(product_price / count))
rank += 1
day += 1
if get_data['t'] == '5':
this_year = datetime.date.today().year
day_string = True
for a_product in Product.objects.all():
count = 0
product_profit = 0
product_name = a_product.name
for this_day_transaction in BuySellProfitInventoryIndividual.objects.filter(shop=shop_object):
# start counting for this product
if this_day_transaction.product == a_product:
product_profit += this_day_transaction.profit
count += 1
output += '["%s","%s"] ,' % (product_name, product_profit)
output = output[:-1]
output += ']}'
return HttpResponse(output, content_type="text/plain")
# Distributor Section
@login_required(login_url='/login/')
def add_sr_page(request):
dr_name = request.session['user']
dr_object = ACL.objects.get(loginID=dr_name).loginUser
transcriber_name = dr_object.name
all_subscriber = ACL.objects.filter(distUser=dr_object)
# type_of_subscriber = ConsumerType.objects.all()
add_notification = False
# shop_consumer = ConsumerType.objects.get(type_name='Seller')
# all_shop_for_base = Consumer.objects.filter(type=shop_consumer)
# all_user_for_base = Consumer.objects.all()
# transcriber_name = request.session['user']
# shop_consumer2 = ConsumerType.objects.get(type_name='Buyer')
# all_consumer_for_base = Consumer.objects.filter(type=shop_consumer2)
notification = ''
if 'delete' in request.GET:
get_data = request.GET
add_notification = True
delID = get_data['delete']
if Consumer.objects.filter(id=delID).exists():
item_for_delete = Consumer.objects.get(id=delID)
notification = 'The Consumer ' + item_for_delete.name + ' was deleted successfully.'
item_for_delete.delete()
else:
notification = 'Item not found'
return render(request, 'pages/Distributor/add_SR.html',
{'subscribers': all_subscriber, 'add_notification': add_notification,
# 'shop_list_base': all_shop_for_base,
# 'all_consumer_for_base' :all_consumer_for_base,
'transcriber_name': transcriber_name,
'notification': notification})
@login_required(login_url='/login/')
def dr_monthly_report(request):
dr_name = request.session['user']
dr_object = ACL.objects.get(loginID=dr_name).loginUser
transcriber_name = dr_object.name
transcriber_id = dr_object.id
all_subscriber = ACL.objects.filter(distUser=dr_object)
post_data = request.POST
print(post_data)
if 'sr' in post_data:
sr_object = Consumer.objects.get(id=post_data['sr'])
allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
return render(request, 'pages/Distributor/report_monthly.html', {'transcriber_name': transcriber_name,
'hasReport': True,
'subscribers': all_subscriber,
'transcriber_id': transcriber_id,
'allTransaction': allTransaction})
else:
# allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
return render(request, 'pages/Distributor/report_monthly.html', {'transcriber_name': transcriber_name,
'transcriber_id': transcriber_id,
'subscribers': all_subscriber,
'hasReport': False})
@login_required(login_url='/login/')
def dr_due_report(request):
sr_name = request.session['user']
dr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = dr_object.name
transcriber_id = dr_object.id
all_subscriber = ACL.objects.filter(distUser=dr_object)
post_data = request.POST
if 'sr' in post_data:
sr_object = Consumer.objects.get(id=post_data['sr'])
allBalance = BuyerSellerAccount.objects.filter(seller=sr_object)
sell_transaction = Transaction.objects.filter(seller=sr_object)
dueTransactions = dueTransaction.objects.filter(seller=sr_object)
# allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
return render(request, 'pages/Distributor/report_due.html', {'transcriber_name': transcriber_name,
'sell_transaction': sell_transaction,
'dueTransactions': dueTransactions,
'transcriber_id': transcriber_id,
'hasReport': True,
'subscribers': all_subscriber,
'allBalance': allBalance})
else:
# allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
return render(request, 'pages/Distributor/report_due.html', {'transcriber_name': transcriber_name,
'transcriber_id': transcriber_id,
'subscribers': all_subscriber,
'hasReport': False})
@login_required(login_url='/login/')
def dr_report_sales_analysis(request):
dr_name = request.session['user']
dr_object = ACL.objects.get(loginID=dr_name).loginUser
transcriber_name = dr_object.name
transcriber_id = dr_object.id
post_data = request.POST
print(post_data)
# shop_object = sr_object
#
all_subscriber = ACL.objects.filter(distUser=dr_object)
hasReport = False
if 'sr' in post_data:
shop_id = post_data['sr']
shop_name = Consumer.objects.get(id=shop_id).name
hasReport = True
if 'month' in post_data and 'year' in post_data:
month = post_data['month']
year = post_data['year']
else:
month = datetime.date.today().month
year = datetime.date.today().year
return render(request, 'pages/Distributor/report_sales_analysis.html', {'shop_name': shop_name,
'transcriber_id': transcriber_id,
'shop_id': shop_id,
'subscribers': all_subscriber,
'transcriber_name': transcriber_name,
'month': month,
'hasReport': hasReport,
'year': year})
else:
return render(request, 'pages/Distributor/report_sales_analysis.html', {'shop_name': 'Not Selected',
'transcriber_id': transcriber_id,
'subscribers': all_subscriber,
'transcriber_name': transcriber_name,
'hasReport': hasReport})
# Shop Module
@login_required(login_url='/login/')
def shop_monthly_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
allTransactionIn = BuyerSellerAccount.objects.filter(buyer=sr_object)
return render(request, 'pages/Shop/report_monthly.html', {'transcriber_name': transcriber_name,
'allTransactionIn': allTransactionIn,
'allTransaction': allTransaction})
@login_required(login_url='/login/')
def shop_due_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
allBalance = BuyerSellerAccount.objects.filter(seller=sr_object)
sell_transaction = Transaction.objects.filter(seller=sr_object)
dueTransactions = dueTransaction.objects.filter(seller=sr_object)
allBalanceIn = BuyerSellerAccount.objects.filter(buyer=sr_object)
sell_transactionIn = Transaction.objects.filter(buyer=sr_object)
dueTransactionsIn = dueTransaction.objects.filter(buyer=sr_object)
return render(request, 'pages/Shop/report_due.html', {'transcriber_name': transcriber_name,
'sell_transaction': sell_transaction,
'dueTransactions': dueTransactions,
'allBalance': allBalance,
'sell_transactionIn': sell_transactionIn,
'dueTransactionsIn': dueTransactionsIn,
'allBalanceIn': allBalanceIn})
@login_required(login_url='/login/')
def shop_report_sales_analysis(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
post_data = request.POST
print(post_data)
shop_object = sr_object
shop_name = shop_object.name
shop_id = shop_object.id
if 'month' in post_data and 'year' in post_data:
month = post_data['month']
year = post_data['year']
else:
month = datetime.date.today().month
year = datetime.date.today().year
return render(request, 'pages/Shop/report_sales_analysis.html', {'shop_name': shop_name,
# 'all_consumer_for_base' :all_consumer_for_base,
'shop_id': shop_id,
# 'bangla': bangla,
'transcriber_name': transcriber_name,
'month': month,
'year': year})
# Consumer Module
@login_required(login_url='/login/')
def user_monthly_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
# allTransaction = BuyerSellerAccount.objects.filter(seller=sr_object)
allTransactionIn = BuyerSellerAccount.objects.filter(buyer=sr_object)
return render(request, 'pages/Consumer/report_monthly.html', {'transcriber_name': transcriber_name,
'allTransactionIn': allTransactionIn})
@login_required(login_url='/login/')
def user_due_report(request):
sr_name = request.session['user']
sr_object = ACL.objects.get(loginID=sr_name).loginUser
transcriber_name = sr_object.name
# allBalance = BuyerSellerAccount.objects.filter(seller=sr_object)
# sell_transaction = Transaction.objects.filter(seller=sr_object)
# dueTransactions = dueTransaction.objects.filter(seller=sr_object)
allBalanceIn = BuyerSellerAccount.objects.filter(buyer=sr_object)
sell_transactionIn = Transaction.objects.filter(buyer=sr_object)
dueTransactionsIn = dueTransaction.objects.filter(buyer=sr_object)
return render(request, 'pages/Consumer/report_due.html', {'transcriber_name': transcriber_name,
# 'sell_transaction': sell_transaction,
# 'dueTransactions': dueTransactions,
# 'allBalance': allBalance,
'sell_transactionIn': sell_transactionIn,
'dueTransactionsIn': dueTransactionsIn,
'allBalanceIn': allBalanceIn})
@login_required(login_url='/login/')
def change_password(request):
# user = request.session['user']
post_data = request.POST
user_name = request.session['user']
user_object = ACL.objects.get(loginID=user_name).loginUser
transcriber_name = user_object.name
user = user_object.phone[-9:]
wrong = False
text = ''
if 'csrfmiddlewaretoken' in post_data:
if post_data['password'] == post_data['re-password']:
if User.objects.filter(username=user).exists():
u = User.objects.get(username=user)
u.set_password(post_data['password'])
u.save()
user_ID = user_object.id
this_user = Consumer.objects.get(id=user_ID)
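# Quirk kept from the original schema: number_of_child doubles as a marker that the password was changed.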
this_user.number_of_child = 'CHANGED !!!'
this_user.save()
wrong = True
text = 'Password is successfully changed'
if user_object.type.type_name == 'Distributor':
display = render(request, 'pages/Distributor/index.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'SR':
display = render(request, 'pages/SR/index.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Seller':
display = render(request, 'pages/Shop/index.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Buyer':
display = render(request, 'pages/Consumer/index.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
else:
wrong = True
text = 'Something went wrong'
if user_object.type.type_name == 'Distributor':
display = render(request, 'pages/Distributor/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'SR':
display = render(request, 'pages/SR/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Seller':
display = render(request, 'pages/Shop/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Buyer':
display = render(request, 'pages/Consumer/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
else:
wrong = True
text = 'Passwords do NOT match. Please try again'
if user_object.type.type_name == 'Distributor':
display = render(request, 'pages/Distributor/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'SR':
display = render(request, 'pages/SR/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Seller':
display = render(request, 'pages/Shop/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Buyer':
display = render(request, 'pages/Consumer/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
else:
wrong = False
if user_object.type.type_name == 'Distributor':
display = render(request, 'pages/Distributor/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'SR':
display = render(request, 'pages/SR/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Seller':
display = render(request, 'pages/Shop/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
elif user_object.type.type_name == 'Buyer':
display = render(request, 'pages/Consumer/change_password.html', {'transcriber_name': transcriber_name,
'wrong': wrong,
'text': text})
return display<|fim▁end|> | |
<|file_name|>Enums.hpp<|end_file_name|><|fim▁begin|>/*
* File: Enums.hpp
* Author: Arthur Lisek-Koper <[email protected]>
*
* Created on December 28, 2014, 3:50 PM
*
* Summary of Code:
* Declares the shared enumerations for the game: animation frames, terrain,
* render layers, object types/states/actions, input, and game/window states.
*/
#ifndef ENUMS_HPP
#define ENUMS_HPP
enum class frameType : int {
UNKNOWN = -1,
STAND_1 = 0,
STAND_2,
WALK_1,
WALK_2,
RUN,
JUMP_UP,
JUMP_DOWN,
ATTACK_1,
ATTACK_2,
SWIM,
FRAME_COUNT
};
enum class terrainShape {
UNKNOWN = -1,
SQUARE = 0,
CIRCLE,
NUM_SHAPES
};
enum class terrainType : int {
UNKNOWN = -1,
EMPTY = 0,
GROUND,
NUM_TYPES
};
enum class screenLayer : int {
TRUE_BACKGROUND = 0, //A single repeating image
FAR_BACKGROUND,
BACKGROUND,
CENTER_BACKGROUND,
CENTER,
CENTER_FOREGROUND,
FOREGROUND,
NEAR_FOREGROUND,
TRUE_FOREGROUND, //Things that do not move from the front of the screen
NUM_LAYERS
};
enum class objectType {
PLAYER = 0,
CREATURE,
TERRAIN,
BASIC_TERRAIN,
PLATFORM,
ITEM,
PROJECTILE,
ENEMY,
NPC,
UNKNOWN
};
enum class objectState {
GROUNDED = 0,
AIR,
WATER,
LADDER,<|fim▁hole|> CINEMATIC
};
enum class objectAction {
WALK = 0,
RUN,
CROUCH,
ATTACK,
JUMP,
STAND,
HARMED,
NONE
};
enum class errorType : int {
WARNING = 0,
CRITICAL,
};
enum class collisionSide {
TOP = 0,
BOTTOM,
LEFT,
RIGHT
};
enum class playerInput {
UP,
DOWN,
LEFT,
RIGHT,
ESC,
NUM_INPUTS
};
enum class gameState {
MAIN_MENU,
IN_WORLD,
PAUSED,
NUM_STATES
};
enum class windowState {
UNKNOWN = -1,
IN_FOCUS,
OUT_FOCUS,
RESIZE,
CLOSING,
NUM_STATES
};
enum class objComType {
UNKNOWN = -1,
ID = 0,
NAME,
TYPE,
SPRITE, //Has sprite?
SIZE,
SPEED,
INTERACT, //isInteractable?
COMMENT,
LOAD,
COUNT, //Specific to terrain.
SHAPE, //terrainShape
TERRAIN_TYPE, //terrainType
NUM_COMMANDS
};
#endif /* ENUMS_HPP */<|fim▁end|> | STAIRS, |
<|file_name|>SimpleDipEntityItem.java<|end_file_name|><|fim▁begin|>package yio.tro.antiyoy.menu.customizable_list;
import com.badlogic.gdx.graphics.g2d.BitmapFont;
import yio.tro.antiyoy.gameplay.diplomacy.DiplomaticEntity;
import yio.tro.antiyoy.menu.render.AbstractRenderCustomListItem;
import yio.tro.antiyoy.menu.render.MenuRender;
import yio.tro.antiyoy.menu.scenes.Scenes;
import yio.tro.antiyoy.menu.scenes.gameplay.choose_entity.IDipEntityReceiver;
import yio.tro.antiyoy.stuff.Fonts;
import yio.tro.antiyoy.stuff.GraphicsYio;
public class SimpleDipEntityItem extends AbstractSingleLineItem{
<|fim▁hole|>
@Override
protected BitmapFont getTitleFont() {
return Fonts.smallerMenuFont;
}
@Override
protected double getHeight() {
return 0.07f * GraphicsYio.height;
}
@Override
protected void onClicked() {
Scenes.sceneChooseDiplomaticEntity.onDiplomaticEntityChosen(diplomaticEntity);
}
public void setDiplomaticEntity(DiplomaticEntity diplomaticEntity) {
this.diplomaticEntity = diplomaticEntity;
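// Tint the row with the entity's fraction color and use its capital name as the title.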
backgroundColor = getGameController().colorsManager.getColorByFraction(diplomaticEntity.fraction);
setTitle("" + diplomaticEntity.capitalName);
}
@Override
public AbstractRenderCustomListItem getRender() {
return MenuRender.renderSimpleDipEntityItem;
}
}<|fim▁end|> | public DiplomaticEntity diplomaticEntity;
public int backgroundColor;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# $Id:$
import ctypes
import math
import sys
import threading
import time
import pyglet
_debug = pyglet.options['debug_media']
import mt_media
from . import lib_dsound as lib
from pyglet.window.win32 import user32, kernel32
class DirectSoundException(mt_media.MediaException):
pass
def _db(gain):
"""Convert linear gain in range [0.0, 1.0] to 100ths of dB."""
if gain <= 0:
return -10000
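# Examples: _db(1.0) -> 0, _db(0.5) -> int(1000 * ln(0.5)) = -693, _db(0.0) -> -10000 (DSBVOLUME_MIN).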
return max(-10000, min(int(1000 * math.log(min(gain, 1))), 0))
class DirectSoundWorker(mt_media.MediaThread):<|fim▁hole|>
# Time to wait if there are players, but they're all full.
_nap_time = 0.05
# Time to wait if there are no players.
_sleep_time = None
def __init__(self):
super().__init__()
self.players = set()
def run(self):
while True:
# This is a big lock, but ensures a player is not deleted while
# we're processing it -- this saves on extra checks in the
# player's methods that would otherwise have to check that it's
# still alive.
if _debug:
print('DirectSoundWorker run attempt acquire')
self.condition.acquire()
if _debug:
print('DirectSoundWorker run acquire')
if self.stopped:
self.condition.release()
break
sleep_time = -1
if self.players:
player = None
write_size = 0
for p in self.players:
s = p.get_write_size()
if s > write_size:
player = p
write_size = s
if write_size > self._min_write_size:
player.refill(write_size)
else:
sleep_time = self._nap_time
else:
sleep_time = self._sleep_time
self.condition.release()
if _debug:
print('DirectSoundWorker run release')
if sleep_time != -1:
self.sleep(sleep_time)
if _debug:
print('DirectSoundWorker exiting')
def add(self, player):
if _debug:
print('DirectSoundWorker add', player)
self.condition.acquire()
self.players.add(player)
self.condition.notify()
self.condition.release()
if _debug:
print('return DirectSoundWorker add', player)
def remove(self, player):
if _debug:
print('DirectSoundWorker remove', player)
self.condition.acquire()
try:
self.players.remove(player)
except KeyError:
pass
self.condition.notify()
self.condition.release()
if _debug:
print('return DirectSoundWorker remove', player)
class DirectSoundAudioPlayer(mt_media.AbstractAudioPlayer):
# How many bytes the ring buffer should be
_buffer_size = 44800 * 1
# Need to cache these because pyglet API allows update separately, but
# DSound requires both to be set at once.
_cone_inner_angle = 360
_cone_outer_angle = 360
def __init__(self, source_group, player):
super().__init__(source_group, player)
# Locking strategy:
# All DirectSound calls should be locked. All instance vars relating
# to buffering/filling/time/events should be locked (used by both
# application and worker thread). Other instance vars (consts and
# 3d vars) do not need to be locked.
self._lock = threading.RLock()
# Desired play state (may be actually paused due to underrun -- not
# implemented yet).
self._playing = False
# Up to one audio data may be buffered if too much data was received
# from the source that could not be written immediately into the
# buffer. See refill().
self._next_audio_data = None
# Theoretical write and play cursors for an infinite buffer. play
# cursor is always <= write cursor (when equal, underrun is
# happening).
self._write_cursor = 0
self._play_cursor = 0
# Cursor position of end of data. Silence is written after
# eos for one buffer size.
self._eos_cursor = None
# Indexes into DSound circular buffer. Complications ensue wrt each
# other to avoid writing over the play cursor. See get_write_size and
# write().
self._play_cursor_ring = 0
self._write_cursor_ring = 0
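# Example: with a 44800-byte ring, after writing 50000 bytes and playing 10000, _write_cursor == 50000, _play_cursor == 10000 and _write_cursor_ring == 50000 % 44800 == 5200.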
# List of (play_cursor, MediaEvent), in sort order
self._events = list()
# List of (cursor, timestamp), in sort order (cursor gives expiry
# place of the timestamp)
self._timestamps = list()
audio_format = source_group.audio_format
wfx = lib.WAVEFORMATEX()
wfx.wFormatTag = lib.WAVE_FORMAT_PCM
wfx.nChannels = audio_format.channels
wfx.nSamplesPerSec = audio_format.sample_rate
wfx.wBitsPerSample = audio_format.sample_size
wfx.nBlockAlign = wfx.wBitsPerSample * wfx.nChannels // 8
wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign
dsbdesc = lib.DSBUFFERDESC()
dsbdesc.dwSize = ctypes.sizeof(dsbdesc)
dsbdesc.dwFlags = (lib.DSBCAPS_GLOBALFOCUS |
lib.DSBCAPS_GETCURRENTPOSITION2 |
lib.DSBCAPS_CTRLFREQUENCY |
lib.DSBCAPS_CTRLVOLUME)
if audio_format.channels == 1:
dsbdesc.dwFlags |= lib.DSBCAPS_CTRL3D
dsbdesc.dwBufferBytes = self._buffer_size
dsbdesc.lpwfxFormat = ctypes.pointer(wfx)
# DSound buffer
self._buffer = lib.IDirectSoundBuffer()
driver._dsound.CreateSoundBuffer(dsbdesc,
ctypes.byref(self._buffer),
None)
if audio_format.channels == 1:
self._buffer3d = lib.IDirectSound3DBuffer()
self._buffer.QueryInterface(lib.IID_IDirectSound3DBuffer,
ctypes.byref(self._buffer3d))
else:
self._buffer3d = None
self._buffer.SetCurrentPosition(0)
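# Pre-fill the whole ring buffer so playback can start without an immediate underrun.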
self.refill(self._buffer_size)
def __del__(self):
try:
self.delete()
except:
pass
def delete(self):
if driver and driver.worker:
driver.worker.remove(self)
self.lock()
self._buffer.Stop()
self._buffer.Release()
self._buffer = None
if self._buffer3d:
self._buffer3d.Release()
self._buffer3d = None
self.unlock()
def lock(self):
self._lock.acquire()
def unlock(self):
self._lock.release()
def play(self):
if _debug:
print('DirectSound play')
driver.worker.add(self)
self.lock()
if not self._playing:
self._playing = True
self._buffer.Play(0, 0, lib.DSBPLAY_LOOPING)
self.unlock()
if _debug:
print('return DirectSound play')
def stop(self):
if _debug:
print('DirectSound stop')
driver.worker.remove(self)
self.lock()
if self._playing:
self._playing = False
self._buffer.Stop()
self.unlock()
if _debug:
print('return DirectSound stop')
def clear(self):
if _debug:
print('DirectSound clear')
self.lock()
self._buffer.SetCurrentPosition(0)
self._play_cursor_ring = self._write_cursor_ring = 0
self._play_cursor = self._write_cursor
self._eos_cursor = None
self._next_audio_data = None
del self._events[:]
del self._timestamps[:]
self.unlock()
def refill(self, write_size):
self.lock()
while write_size > 0:
if _debug:
print('refill, write_size =', write_size)
# Get next audio packet (or remains of last one)
if self._next_audio_data:
audio_data = self._next_audio_data
self._next_audio_data = None
else:
audio_data = self.source_group.get_audio_data(write_size)
# Write it, or silence if there are no more packets
if audio_data:
# Add events
for event in audio_data.events:
event_cursor = self._write_cursor + event.timestamp * \
self.source_group.audio_format.bytes_per_second
self._events.append((event_cursor, event))
# Add timestamp (at end of this data packet)
ts_cursor = self._write_cursor + audio_data.length
self._timestamps.append(
(ts_cursor, audio_data.timestamp + audio_data.duration))
# Write data
if _debug:
print('write', audio_data.length)
length = min(write_size, audio_data.length)
self.write(audio_data, length)
if audio_data.length:
self._next_audio_data = audio_data
write_size -= length
else:
# Write silence
if self._eos_cursor is None:
self._eos_cursor = self._write_cursor
self._events.append(
(self._eos_cursor,
mt_media.MediaEvent(0, 'on_eos')))
self._events.append(
(self._eos_cursor,
mt_media.MediaEvent(0, 'on_source_group_eos')))
self._events.sort()
if self._write_cursor > self._eos_cursor + self._buffer_size:
self.stop()
else:
self.write(None, write_size)
write_size = 0
self.unlock()
def update_play_cursor(self):
self.lock()
play_cursor_ring = lib.DWORD()
self._buffer.GetCurrentPosition(play_cursor_ring, None)
if play_cursor_ring.value < self._play_cursor_ring:
# Wrapped around
self._play_cursor += self._buffer_size - self._play_cursor_ring
self._play_cursor_ring = 0
self._play_cursor += play_cursor_ring.value - self._play_cursor_ring
self._play_cursor_ring = play_cursor_ring.value
# Dispatch pending events
pending_events = list()
while self._events and self._events[0][0] <= self._play_cursor:
_, event = self._events.pop(0)
pending_events.append(event)
if _debug:
print('Dispatching pending events:', pending_events)
print('Remaining events:', self._events)
# Remove expired timestamps
while self._timestamps and self._timestamps[0][0] < self._play_cursor:
del self._timestamps[0]
self.unlock()
for event in pending_events:
event._sync_dispatch_to_player(self.player)
def get_write_size(self):
self.update_play_cursor()
self.lock()
play_cursor = self._play_cursor
write_cursor = self._write_cursor
self.unlock()
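# Free space is the ring size minus the bytes written but not yet played.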
return self._buffer_size - (write_cursor - play_cursor)
def write(self, audio_data, length):
# Pass audio_data=None to write silence
if length == 0:
return 0
self.lock()
p1 = ctypes.c_void_p()
l1 = lib.DWORD()
p2 = ctypes.c_void_p()
l2 = lib.DWORD()
self._buffer.Lock(self._write_cursor_ring, length,
ctypes.byref(p1), l1, ctypes.byref(p2), l2, 0)
assert length == l1.value + l2.value
if audio_data:
ctypes.memmove(p1, audio_data.data, l1.value)
audio_data.consume(l1.value, self.source_group.audio_format)
if l2.value:
ctypes.memmove(p2, audio_data.data, l2.value)
audio_data.consume(l2.value, self.source_group.audio_format)
else:
ctypes.memset(p1, 0, l1.value)
if l2.value:
ctypes.memset(p2, 0, l2.value)
self._buffer.Unlock(p1, l1, p2, l2)
self._write_cursor += length
self._write_cursor_ring += length
self._write_cursor_ring %= self._buffer_size
self.unlock()
def get_time(self):
self.lock()
if self._timestamps:
cursor, ts = self._timestamps[0]
result = ts + (self._play_cursor - cursor) / \
float(self.source_group.audio_format.bytes_per_second)
else:
result = None
self.unlock()
return result
def set_volume(self, volume):
volume = _db(volume)
self.lock()
self._buffer.SetVolume(volume)
self.unlock()
def set_position(self, position):
if self._buffer3d:
x, y, z = position
self.lock()
self._buffer3d.SetPosition(x, y, -z, lib.DS3D_IMMEDIATE)
self.unlock()
def set_min_distance(self, min_distance):
if self._buffer3d:
self.lock()
self._buffer3d.SetMinDistance(min_distance, lib.DS3D_IMMEDIATE)
self.unlock()
def set_max_distance(self, max_distance):
if self._buffer3d:
self.lock()
self._buffer3d.SetMaxDistance(max_distance, lib.DS3D_IMMEDIATE)
self.unlock()
def set_pitch(self, pitch):
frequency = int(pitch * self.source_group.audio_format.sample_rate)
self.lock()
self._buffer.SetFrequency(frequency)
self.unlock()
def set_cone_orientation(self, cone_orientation):
if self._buffer3d:
x, y, z = cone_orientation
self.lock()
self._buffer3d.SetConeOrientation(x, y, -z, lib.DS3D_IMMEDIATE)
self.unlock()
def set_cone_inner_angle(self, cone_inner_angle):
if self._buffer3d:
self._cone_inner_angle = int(cone_inner_angle)
self._set_cone_angles()
def set_cone_outer_angle(self, cone_outer_angle):
if self._buffer3d:
self._cone_outer_angle = int(cone_outer_angle)
self._set_cone_angles()
def _set_cone_angles(self):
inner = min(self._cone_inner_angle, self._cone_outer_angle)
outer = max(self._cone_inner_angle, self._cone_outer_angle)
self.lock()
self._buffer3d.SetConeAngles(inner, outer, lib.DS3D_IMMEDIATE)
self.unlock()
def set_cone_outer_gain(self, cone_outer_gain):
if self._buffer3d:
volume = _db(cone_outer_gain)
self.lock()
self._buffer3d.SetConeOutsideVolume(volume, lib.DS3D_IMMEDIATE)
self.unlock()
class DirectSoundDriver(mt_media.AbstractAudioDriver):
def __init__(self):
self._dsound = lib.IDirectSound()
lib.DirectSoundCreate(None, ctypes.byref(self._dsound), None)
# A trick used by mplayer.. use desktop as window handle since it
# would be complex to use pyglet window handles (and what to do when
# application is audio only?).
hwnd = user32.GetDesktopWindow()
self._dsound.SetCooperativeLevel(hwnd, lib.DSSCL_NORMAL)
# Create primary buffer with 3D and volume capabilities
self._buffer = lib.IDirectSoundBuffer()
dsbd = lib.DSBUFFERDESC()
dsbd.dwSize = ctypes.sizeof(dsbd)
dsbd.dwFlags = (lib.DSBCAPS_CTRL3D |
lib.DSBCAPS_CTRLVOLUME |
lib.DSBCAPS_PRIMARYBUFFER)
self._dsound.CreateSoundBuffer(dsbd, ctypes.byref(self._buffer), None)
# Create listener
self._listener = lib.IDirectSound3DListener()
self._buffer.QueryInterface(lib.IID_IDirectSound3DListener,
ctypes.byref(self._listener))
# Create worker thread
self.worker = DirectSoundWorker()
self.worker.start()
def __del__(self):
try:
if self._buffer:
self.delete()
except:
pass
def create_audio_player(self, source_group, player):
return DirectSoundAudioPlayer(source_group, player)
def delete(self):
self.worker.stop()
self._buffer.Release()
self._buffer = None
self._listener.Release()
self._listener = None
# Listener API
def _set_volume(self, volume):
self._volume = volume
self._buffer.SetVolume(_db(volume))
def _set_position(self, position):
self._position = position
x, y, z = position
self._listener.SetPosition(x, y, -z, lib.DS3D_IMMEDIATE)
def _set_forward_orientation(self, orientation):
self._forward_orientation = orientation
self._set_orientation()
def _set_up_orientation(self, orientation):
self._up_orientation = orientation
self._set_orientation()
def _set_orientation(self):
x, y, z = self._forward_orientation
ux, uy, uz = self._up_orientation
self._listener.SetOrientation(
x, y, -z, ux, uy, -uz, lib.DS3D_IMMEDIATE)
def create_audio_driver():
global driver
driver = DirectSoundDriver()
return driver
# Global driver needed for access to worker thread and _dsound
driver = None<|fim▁end|> | _min_write_size = 9600 |
<|file_name|>AndTypeImpl.java<|end_file_name|><|fim▁begin|>/**
*/
package org.liquibase.xml.ns.dbchangelog.impl;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.MinimalEObjectImpl;
import org.eclipse.emf.ecore.util.BasicFeatureMap;
import org.eclipse.emf.ecore.util.FeatureMap;
import org.eclipse.emf.ecore.util.InternalEList;
import org.liquibase.xml.ns.dbchangelog.AndType;
import org.liquibase.xml.ns.dbchangelog.ChangeLogPropertyDefinedType;
import org.liquibase.xml.ns.dbchangelog.ChangeSetExecutedType;
import org.liquibase.xml.ns.dbchangelog.ColumnExistsType;
import org.liquibase.xml.ns.dbchangelog.CustomPreconditionType;
import org.liquibase.xml.ns.dbchangelog.DbchangelogPackage;
import org.liquibase.xml.ns.dbchangelog.DbmsType;
import org.liquibase.xml.ns.dbchangelog.ExpectedQuotingStrategyType;
import org.liquibase.xml.ns.dbchangelog.ForeignKeyConstraintExistsType;
import org.liquibase.xml.ns.dbchangelog.IndexExistsType;
import org.liquibase.xml.ns.dbchangelog.NotType;
import org.liquibase.xml.ns.dbchangelog.OrType;
import org.liquibase.xml.ns.dbchangelog.PrimaryKeyExistsType;
import org.liquibase.xml.ns.dbchangelog.RowCountType;
import org.liquibase.xml.ns.dbchangelog.RunningAsType;
import org.liquibase.xml.ns.dbchangelog.SequenceExistsType;
import org.liquibase.xml.ns.dbchangelog.SqlCheckType;
import org.liquibase.xml.ns.dbchangelog.TableExistsType;
import org.liquibase.xml.ns.dbchangelog.TableIsEmptyType;
import org.liquibase.xml.ns.dbchangelog.ViewExistsType;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>And Type</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getGroup <em>Group</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getAnd <em>And</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getOr <em>Or</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getNot <em>Not</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getDbms <em>Dbms</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getRunningAs <em>Running As</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getChangeSetExecuted <em>Change Set Executed</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getTableExists <em>Table Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getColumnExists <em>Column Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getSequenceExists <em>Sequence Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getForeignKeyConstraintExists <em>Foreign Key Constraint Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getIndexExists <em>Index Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getPrimaryKeyExists <em>Primary Key Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getViewExists <em>View Exists</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getTableIsEmpty <em>Table Is Empty</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getRowCount <em>Row Count</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getSqlCheck <em>Sql Check</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getChangeLogPropertyDefined <em>Change Log Property Defined</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getExpectedQuotingStrategy <em>Expected Quoting Strategy</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getCustomPrecondition <em>Custom Precondition</em>}</li>
* <li>{@link org.liquibase.xml.ns.dbchangelog.impl.AndTypeImpl#getAny <em>Any</em>}</li>
* </ul>
*
* @generated
*/
public class AndTypeImpl extends MinimalEObjectImpl.Container implements AndType {
/**
* The cached value of the '{@link #getGroup() <em>Group</em>}' attribute list.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @see #getGroup()
* @generated
* @ordered
*/
protected FeatureMap group;
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected AndTypeImpl() {
super();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
protected EClass eStaticClass() {
return DbchangelogPackage.eINSTANCE.getAndType();
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public FeatureMap getGroup() {
if (group == null) {
group = new BasicFeatureMap(this, DbchangelogPackage.AND_TYPE__GROUP);
}
return group;
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<AndType> getAnd() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_And());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<OrType> getOr() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_Or());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<NotType> getNot() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_Not());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<DbmsType> getDbms() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_Dbms());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<RunningAsType> getRunningAs() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_RunningAs());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ChangeSetExecutedType> getChangeSetExecuted() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ChangeSetExecuted());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<TableExistsType> getTableExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_TableExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ColumnExistsType> getColumnExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ColumnExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<SequenceExistsType> getSequenceExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_SequenceExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ForeignKeyConstraintExistsType> getForeignKeyConstraintExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ForeignKeyConstraintExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<IndexExistsType> getIndexExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_IndexExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<PrimaryKeyExistsType> getPrimaryKeyExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_PrimaryKeyExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ViewExistsType> getViewExists() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ViewExists());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<TableIsEmptyType> getTableIsEmpty() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_TableIsEmpty());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<RowCountType> getRowCount() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_RowCount());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<SqlCheckType> getSqlCheck() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_SqlCheck());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ChangeLogPropertyDefinedType> getChangeLogPropertyDefined() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ChangeLogPropertyDefined());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<ExpectedQuotingStrategyType> getExpectedQuotingStrategy() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_ExpectedQuotingStrategy());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public EList<CustomPreconditionType> getCustomPrecondition() {
return getGroup().list(DbchangelogPackage.eINSTANCE.getAndType_CustomPrecondition());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public FeatureMap getAny() {
return (FeatureMap)getGroup().<FeatureMap.Entry>list(DbchangelogPackage.eINSTANCE.getAndType_Any());
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
switch (featureID) {
case DbchangelogPackage.AND_TYPE__GROUP:
return ((InternalEList<?>)getGroup()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__AND:
return ((InternalEList<?>)getAnd()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__OR:
return ((InternalEList<?>)getOr()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__NOT:
return ((InternalEList<?>)getNot()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__DBMS:
return ((InternalEList<?>)getDbms()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__RUNNING_AS:
return ((InternalEList<?>)getRunningAs()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__CHANGE_SET_EXECUTED:
return ((InternalEList<?>)getChangeSetExecuted()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__TABLE_EXISTS:
return ((InternalEList<?>)getTableExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__COLUMN_EXISTS:
return ((InternalEList<?>)getColumnExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__SEQUENCE_EXISTS:
return ((InternalEList<?>)getSequenceExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__FOREIGN_KEY_CONSTRAINT_EXISTS:
return ((InternalEList<?>)getForeignKeyConstraintExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__INDEX_EXISTS:
return ((InternalEList<?>)getIndexExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__PRIMARY_KEY_EXISTS:
return ((InternalEList<?>)getPrimaryKeyExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__VIEW_EXISTS:
return ((InternalEList<?>)getViewExists()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__TABLE_IS_EMPTY:
return ((InternalEList<?>)getTableIsEmpty()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__ROW_COUNT:
return ((InternalEList<?>)getRowCount()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__SQL_CHECK:
return ((InternalEList<?>)getSqlCheck()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__CHANGE_LOG_PROPERTY_DEFINED:
return ((InternalEList<?>)getChangeLogPropertyDefined()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__EXPECTED_QUOTING_STRATEGY:
return ((InternalEList<?>)getExpectedQuotingStrategy()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__CUSTOM_PRECONDITION:
return ((InternalEList<?>)getCustomPrecondition()).basicRemove(otherEnd, msgs);
case DbchangelogPackage.AND_TYPE__ANY:
return ((InternalEList<?>)getAny()).basicRemove(otherEnd, msgs);
}
return super.eInverseRemove(otherEnd, featureID, msgs);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public Object eGet(int featureID, boolean resolve, boolean coreType) {
switch (featureID) {
case DbchangelogPackage.AND_TYPE__GROUP:
if (coreType) return getGroup();
return ((FeatureMap.Internal)getGroup()).getWrapper();
case DbchangelogPackage.AND_TYPE__AND:
return getAnd();
case DbchangelogPackage.AND_TYPE__OR:
return getOr();
case DbchangelogPackage.AND_TYPE__NOT:
return getNot();
case DbchangelogPackage.AND_TYPE__DBMS:
return getDbms();
case DbchangelogPackage.AND_TYPE__RUNNING_AS:
return getRunningAs();
case DbchangelogPackage.AND_TYPE__CHANGE_SET_EXECUTED:
return getChangeSetExecuted();
case DbchangelogPackage.AND_TYPE__TABLE_EXISTS:
return getTableExists();
case DbchangelogPackage.AND_TYPE__COLUMN_EXISTS:
return getColumnExists();
case DbchangelogPackage.AND_TYPE__SEQUENCE_EXISTS:
return getSequenceExists();
case DbchangelogPackage.AND_TYPE__FOREIGN_KEY_CONSTRAINT_EXISTS:
return getForeignKeyConstraintExists();
case DbchangelogPackage.AND_TYPE__INDEX_EXISTS:
return getIndexExists();
case DbchangelogPackage.AND_TYPE__PRIMARY_KEY_EXISTS:
return getPrimaryKeyExists();
case DbchangelogPackage.AND_TYPE__VIEW_EXISTS:
return getViewExists();
case DbchangelogPackage.AND_TYPE__TABLE_IS_EMPTY:
return getTableIsEmpty();
case DbchangelogPackage.AND_TYPE__ROW_COUNT:
return getRowCount();
case DbchangelogPackage.AND_TYPE__SQL_CHECK:
return getSqlCheck();
case DbchangelogPackage.AND_TYPE__CHANGE_LOG_PROPERTY_DEFINED:
return getChangeLogPropertyDefined();
case DbchangelogPackage.AND_TYPE__EXPECTED_QUOTING_STRATEGY:
return getExpectedQuotingStrategy();
case DbchangelogPackage.AND_TYPE__CUSTOM_PRECONDITION:
return getCustomPrecondition();
case DbchangelogPackage.AND_TYPE__ANY:
if (coreType) return getAny();
return ((FeatureMap.Internal)getAny()).getWrapper();
}
return super.eGet(featureID, resolve, coreType);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@SuppressWarnings("unchecked")
@Override
public void eSet(int featureID, Object newValue) {
switch (featureID) {
<|fim▁hole|> case DbchangelogPackage.AND_TYPE__AND:
getAnd().clear();
getAnd().addAll((Collection<? extends AndType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__OR:
getOr().clear();
getOr().addAll((Collection<? extends OrType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__NOT:
getNot().clear();
getNot().addAll((Collection<? extends NotType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__DBMS:
getDbms().clear();
getDbms().addAll((Collection<? extends DbmsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__RUNNING_AS:
getRunningAs().clear();
getRunningAs().addAll((Collection<? extends RunningAsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__CHANGE_SET_EXECUTED:
getChangeSetExecuted().clear();
getChangeSetExecuted().addAll((Collection<? extends ChangeSetExecutedType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__TABLE_EXISTS:
getTableExists().clear();
getTableExists().addAll((Collection<? extends TableExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__COLUMN_EXISTS:
getColumnExists().clear();
getColumnExists().addAll((Collection<? extends ColumnExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__SEQUENCE_EXISTS:
getSequenceExists().clear();
getSequenceExists().addAll((Collection<? extends SequenceExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__FOREIGN_KEY_CONSTRAINT_EXISTS:
getForeignKeyConstraintExists().clear();
getForeignKeyConstraintExists().addAll((Collection<? extends ForeignKeyConstraintExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__INDEX_EXISTS:
getIndexExists().clear();
getIndexExists().addAll((Collection<? extends IndexExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__PRIMARY_KEY_EXISTS:
getPrimaryKeyExists().clear();
getPrimaryKeyExists().addAll((Collection<? extends PrimaryKeyExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__VIEW_EXISTS:
getViewExists().clear();
getViewExists().addAll((Collection<? extends ViewExistsType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__TABLE_IS_EMPTY:
getTableIsEmpty().clear();
getTableIsEmpty().addAll((Collection<? extends TableIsEmptyType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__ROW_COUNT:
getRowCount().clear();
getRowCount().addAll((Collection<? extends RowCountType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__SQL_CHECK:
getSqlCheck().clear();
getSqlCheck().addAll((Collection<? extends SqlCheckType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__CHANGE_LOG_PROPERTY_DEFINED:
getChangeLogPropertyDefined().clear();
getChangeLogPropertyDefined().addAll((Collection<? extends ChangeLogPropertyDefinedType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__EXPECTED_QUOTING_STRATEGY:
getExpectedQuotingStrategy().clear();
getExpectedQuotingStrategy().addAll((Collection<? extends ExpectedQuotingStrategyType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__CUSTOM_PRECONDITION:
getCustomPrecondition().clear();
getCustomPrecondition().addAll((Collection<? extends CustomPreconditionType>)newValue);
return;
case DbchangelogPackage.AND_TYPE__ANY:
((FeatureMap.Internal)getAny()).set(newValue);
return;
}
super.eSet(featureID, newValue);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public void eUnset(int featureID) {
switch (featureID) {
case DbchangelogPackage.AND_TYPE__GROUP:
getGroup().clear();
return;
case DbchangelogPackage.AND_TYPE__AND:
getAnd().clear();
return;
case DbchangelogPackage.AND_TYPE__OR:
getOr().clear();
return;
case DbchangelogPackage.AND_TYPE__NOT:
getNot().clear();
return;
case DbchangelogPackage.AND_TYPE__DBMS:
getDbms().clear();
return;
case DbchangelogPackage.AND_TYPE__RUNNING_AS:
getRunningAs().clear();
return;
case DbchangelogPackage.AND_TYPE__CHANGE_SET_EXECUTED:
getChangeSetExecuted().clear();
return;
case DbchangelogPackage.AND_TYPE__TABLE_EXISTS:
getTableExists().clear();
return;
case DbchangelogPackage.AND_TYPE__COLUMN_EXISTS:
getColumnExists().clear();
return;
case DbchangelogPackage.AND_TYPE__SEQUENCE_EXISTS:
getSequenceExists().clear();
return;
case DbchangelogPackage.AND_TYPE__FOREIGN_KEY_CONSTRAINT_EXISTS:
getForeignKeyConstraintExists().clear();
return;
case DbchangelogPackage.AND_TYPE__INDEX_EXISTS:
getIndexExists().clear();
return;
case DbchangelogPackage.AND_TYPE__PRIMARY_KEY_EXISTS:
getPrimaryKeyExists().clear();
return;
case DbchangelogPackage.AND_TYPE__VIEW_EXISTS:
getViewExists().clear();
return;
case DbchangelogPackage.AND_TYPE__TABLE_IS_EMPTY:
getTableIsEmpty().clear();
return;
case DbchangelogPackage.AND_TYPE__ROW_COUNT:
getRowCount().clear();
return;
case DbchangelogPackage.AND_TYPE__SQL_CHECK:
getSqlCheck().clear();
return;
case DbchangelogPackage.AND_TYPE__CHANGE_LOG_PROPERTY_DEFINED:
getChangeLogPropertyDefined().clear();
return;
case DbchangelogPackage.AND_TYPE__EXPECTED_QUOTING_STRATEGY:
getExpectedQuotingStrategy().clear();
return;
case DbchangelogPackage.AND_TYPE__CUSTOM_PRECONDITION:
getCustomPrecondition().clear();
return;
case DbchangelogPackage.AND_TYPE__ANY:
getAny().clear();
return;
}
super.eUnset(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public boolean eIsSet(int featureID) {
switch (featureID) {
case DbchangelogPackage.AND_TYPE__GROUP:
return group != null && !group.isEmpty();
case DbchangelogPackage.AND_TYPE__AND:
return !getAnd().isEmpty();
case DbchangelogPackage.AND_TYPE__OR:
return !getOr().isEmpty();
case DbchangelogPackage.AND_TYPE__NOT:
return !getNot().isEmpty();
case DbchangelogPackage.AND_TYPE__DBMS:
return !getDbms().isEmpty();
case DbchangelogPackage.AND_TYPE__RUNNING_AS:
return !getRunningAs().isEmpty();
case DbchangelogPackage.AND_TYPE__CHANGE_SET_EXECUTED:
return !getChangeSetExecuted().isEmpty();
case DbchangelogPackage.AND_TYPE__TABLE_EXISTS:
return !getTableExists().isEmpty();
case DbchangelogPackage.AND_TYPE__COLUMN_EXISTS:
return !getColumnExists().isEmpty();
case DbchangelogPackage.AND_TYPE__SEQUENCE_EXISTS:
return !getSequenceExists().isEmpty();
case DbchangelogPackage.AND_TYPE__FOREIGN_KEY_CONSTRAINT_EXISTS:
return !getForeignKeyConstraintExists().isEmpty();
case DbchangelogPackage.AND_TYPE__INDEX_EXISTS:
return !getIndexExists().isEmpty();
case DbchangelogPackage.AND_TYPE__PRIMARY_KEY_EXISTS:
return !getPrimaryKeyExists().isEmpty();
case DbchangelogPackage.AND_TYPE__VIEW_EXISTS:
return !getViewExists().isEmpty();
case DbchangelogPackage.AND_TYPE__TABLE_IS_EMPTY:
return !getTableIsEmpty().isEmpty();
case DbchangelogPackage.AND_TYPE__ROW_COUNT:
return !getRowCount().isEmpty();
case DbchangelogPackage.AND_TYPE__SQL_CHECK:
return !getSqlCheck().isEmpty();
case DbchangelogPackage.AND_TYPE__CHANGE_LOG_PROPERTY_DEFINED:
return !getChangeLogPropertyDefined().isEmpty();
case DbchangelogPackage.AND_TYPE__EXPECTED_QUOTING_STRATEGY:
return !getExpectedQuotingStrategy().isEmpty();
case DbchangelogPackage.AND_TYPE__CUSTOM_PRECONDITION:
return !getCustomPrecondition().isEmpty();
case DbchangelogPackage.AND_TYPE__ANY:
return !getAny().isEmpty();
}
return super.eIsSet(featureID);
}
/**
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
@Override
public String toString() {
if (eIsProxy()) return super.toString();
StringBuilder result = new StringBuilder(super.toString());
result.append(" (group: ");
result.append(group);
result.append(')');
return result.toString();
}
} //AndTypeImpl<|fim▁end|> | case DbchangelogPackage.AND_TYPE__GROUP:
((FeatureMap.Internal)getGroup()).set(newValue);
return;
|
<|file_name|>report.py<|end_file_name|><|fim▁begin|>import os
from django_extensions.management.jobs import DailyJob
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert import HTMLExporter<|fim▁hole|>
class Job(DailyJob):
help = "Convert Jupyter Notebook in lowfat/reports to HTML page in lowfat/reports/html."
def execute(self):
print("Cleaning lowfat/reports/html ...")
old_reports = os.listdir("lowfat/reports/html")
for old_report in old_reports:
print("- Removing lowfat/reports/html/{}".format(old_report))
os.remove("lowfat/reports/html/{}".format(old_report))
print("Cleaning of lowfat/reports/html is complete.")
notebook_filenames = os.listdir("lowfat/reports")
for notebook_filename in notebook_filenames:
if not notebook_filename.endswith(".ipynb"):
continue
print("Processing lowfat/reports/{}".format(notebook_filename))
# Based on Executing notebooks, nbconvert Documentation by Jupyter Development Team.
# https://nbconvert.readthedocs.io/en/latest/execute_api.html
with open("lowfat/reports/{}".format(notebook_filename)) as file_:
notebook = nbformat.read(file_, as_version=4)
# Kernel is provided by https://github.com/django-extensions/django-extensions/
execute_preprocessor = ExecutePreprocessor(timeout=600, kernel_name='django_extensions')
execute_preprocessor.preprocess(notebook, {'metadata': {'path': '.'}})
html_exporter = HTMLExporter()
html_exporter.template_file = 'basic'
(body, dummy_resources) = html_exporter.from_notebook_node(notebook)
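# Note: the output name keeps the notebook's .ipynb suffix, e.g. 'report.ipynb.html'.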
with open('lowfat/reports/html/{}.html'.format(notebook_filename), 'wt') as file_:
file_.write(body)<|fim▁end|> | |
<|file_name|>rate_scan.py<|end_file_name|><|fim▁begin|>"""
Make a "Bias Curve" or perform a "Rate-scan",
i.e. measure the trigger rate as a function of threshold.<|fim▁hole|>Usage:
digicam-rate-scan [options] [--] <INPUT>...
Options:
--display Display the plots
--compute Computes the trigger rate vs threshold
-o OUTPUT --output=OUTPUT. Path of the output file.
[default: ./rate_scan.fits]
-i INPUT --input=INPUT. Input files.
--threshold_start=N Trigger threshold start
[default: 0]
--threshold_end=N Trigger threshold end
[default: 4095]
--threshold_step=N Trigger threshold step
[default: 5]
--n_samples=N Number of pre-samples used by DigiCam to compute
baseline
[default: 1024]
--figure_path=OUTPUT Figure path
[default: None]
"""
import matplotlib.pyplot as plt
import numpy as np
from docopt import docopt
import fitsio
import pandas as pd
from digicampipe.calib import filters
from digicampipe.calib import trigger, baseline
from digicampipe.calib.trigger import compute_bias_curve
from digicampipe.io.event_stream import event_stream
from digicampipe.io.containers import CameraEventType
def compute(files, output_filename, thresholds, n_samples=1024):
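    # Pipeline sketch: keep only internally triggered events, fill per-event
    # baselines over n_samples bins, then derive the patch and cluster trigger
    # inputs that compute_bias_curve scans against the requested thresholds.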
thresholds = thresholds.astype(float)
data_stream = event_stream(files)
# data_stream = trigger.fill_event_type(data_stream, flag=8)
data_stream = filters.filter_event_types(data_stream,
flags=CameraEventType.INTERNAL)
data_stream = baseline.fill_baseline_r0(data_stream, n_bins=n_samples)
data_stream = filters.filter_missing_baseline(data_stream)
data_stream = trigger.fill_trigger_patch(data_stream)
data_stream = trigger.fill_trigger_input_7(data_stream)
data_stream = trigger.fill_trigger_input_19(data_stream)
output = compute_bias_curve(
data_stream,
thresholds=thresholds,
)
rate, rate_error, cluster_rate, cluster_rate_error, thresholds, \
start_event_id, end_event_id, start_event_time, end_event_time = output
with fitsio.FITS(output_filename, mode='rw', clobber=True) as f:
f.write([np.array([start_event_id, end_event_id]),
np.array([start_event_time, end_event_time])],
extname='meta',
names=['event_id', 'time'])
f.write(thresholds, extname='threshold', compress='gzip')
f.write([rate, rate_error], extname='camera', names=['rate', 'error'],
compress='gzip')
f.write([cluster_rate, cluster_rate_error], names=['rate', 'error'],
extname='cluster',
compress='gzip')
return output
def entry():
args = docopt(__doc__)
input_files = args['<INPUT>']
output_file = args['--output']
start = float(args['--threshold_start'])
end = float(args['--threshold_end'])
step = float(args['--threshold_step'])
thresholds = np.arange(start, end + step, step)
n_samples = int(args['--n_samples'])
figure_path = args['--figure_path']
figure_path = None if figure_path == 'None' else figure_path
if args['--compute']:
compute(input_files, output_file, thresholds=thresholds,
n_samples=n_samples)
if args['--display'] or figure_path is not None:
with fitsio.FITS(output_file, 'r') as f:
meta = f['meta']
id = meta['event_id'].read()
time = meta['time'].read()
start_id, end_id = id
start_time, end_time = time
thresholds = f['threshold'].read()
camera_rate = f['camera']['rate'].read()
camera_rate_error = f['camera']['error'].read()
cluster_rate = f['cluster']['rate'].read()
cluster_rate_error = f['cluster']['error'].read()
start_time = pd.to_datetime(int(start_time), utc=True)
end_time = pd.to_datetime(int(end_time), utc=True)
start_time = start_time.strftime('%Y-%m-%d %H:%M:%S')
end_time = end_time.strftime('%Y-%m-%d %H:%M:%S')
fig = plt.figure()
axes = fig.add_subplot(111)
axes.errorbar(thresholds, camera_rate * 1E9,
yerr=camera_rate_error * 1E9, marker='o', color='k',
label='Start time : {}\nEnd time : {}\nEvent ID :'
' ({}, {})'.format(start_time, end_time, start_id,
end_id))
axes.set_yscale('log')
axes.set_xlabel('Threshold [LSB]')
axes.set_ylabel('Trigger rate [Hz]')
axes.legend(loc='best')
if args['--display']:
plt.show()
if figure_path is not None:
fig.savefig(figure_path)
if __name__ == '__main__':
entry()<|fim▁end|> | |
<|file_name|>auth.go<|end_file_name|><|fim▁begin|><|fim▁hole|> "github.com/skmetaly/pbblog/framework/session"
"net/http"
"net/url"
)
//AuthenticateRequest checks whether the user is authenticated for a given request
func AuthenticateRequest(w http.ResponseWriter, r *http.Request) {
//Redirect to login if they are not authenticated
//Get session
sess := session.Instance(r)
//If user is not authenticated, don't allow them to access the page
if sess.Values["user_id"] == nil {
query := url.Values{}
query.Add("next", url.QueryEscape(r.URL.String()))
http.Redirect(w, r, "/admin/login?"+query.Encode(), http.StatusFound)
}
}<|fim▁end|> | package handlers
import ( |
<|file_name|>hebrew.js.uncompressed.js<|end_file_name|><|fim▁begin|>define(
"dojo/cldr/nls/hu/hebrew", //begin v1.x content<|fim▁hole|> "months-format-abbr": [
"Tisri",
"Hesván",
"Kiszlév",
"Tévész",
"Svát",
"Ádár I",
"Ádár",
"Niszán",
"Ijár",
"Sziván",
"Tamuz",
"Áv",
"Elul"
],
"months-format-abbr-leap": "Ádár II",
"months-format-wide": [
"Tisri",
"Hesván",
"Kiszlév",
"Tévész",
"Svát",
"Ádár risón",
"Ádár",
"Niszán",
"Ijár",
"Sziván",
"Tamuz",
"Áv",
"Elul"
],
"months-format-wide-leap": "Ádár séni",
"months-standAlone-abbr": [
"Tisri",
"Hesván",
"Kiszlév",
"Tévész",
"Svát",
"Ádár risón",
"Ádár",
"Niszán",
"Ijár",
"Sziván",
"Tamuz",
"Áv",
"Elul"
],
"months-standAlone-abbr-leap": "Ádár II",
"months-standAlone-wide": [
"Tisri",
"Hesván",
"Kiszlév",
"Tévész",
"Svát",
"Ádár risón",
"Ádár",
"Niszán",
"Ijár",
"Sziván",
"Tamuz",
"Áv",
"Elul"
],
"months-standAlone-wide-leap": "Ádár II",
"eraAbbr": [
"TÉ"
],
"eraNames": [
"TÉ"
],
"eraNarrow": [
"TÉ"
],
"days-format-abbr": [
"V",
"H",
"K",
"Sze",
"Cs",
"P",
"Szo"
],
"days-format-narrow": [
"V",
"H",
"K",
"Sz",
"Cs",
"P",
"Sz"
],
"days-format-wide": [
"vasárnap",
"hétfő",
"kedd",
"szerda",
"csütörtök",
"péntek",
"szombat"
],
"days-standAlone-abbr": [
"V",
"H",
"K",
"Sze",
"Cs",
"P",
"Szo"
],
"days-standAlone-narrow": [
"V",
"H",
"K",
"Sz",
"Cs",
"P",
"Sz"
],
"days-standAlone-wide": [
"vasárnap",
"hétfő",
"kedd",
"szerda",
"csütörtök",
"péntek",
"szombat"
],
"quarters-format-abbr": [
"N1",
"N2",
"N3",
"N4"
],
"quarters-format-wide": [
"I. negyedév",
"II. negyedév",
"III. negyedév",
"IV. negyedév"
],
"quarters-standAlone-abbr": [
"N1",
"N2",
"N3",
"N4"
],
"quarters-standAlone-wide": [
"1. negyedév",
"2. negyedév",
"3. negyedév",
"4. negyedév"
],
"dayPeriods-format-narrow-am": "de.",
"dayPeriods-format-narrow-pm": "du.",
"dayPeriods-format-wide-am": "de.",
"dayPeriods-format-wide-pm": "du.",
"dateFormat-full": "y. MMMM d., EEEE",
"dateFormat-long": "y. MMMM d.",
"dateFormat-medium": "yyyy.MM.dd.",
"dateFormat-short": "yyyy.MM.dd.",
"dateFormatItem-Ed": "d., E",
"dateFormatItem-h": "a h",
"dateFormatItem-H": "H",
"dateFormatItem-hm": "a h:mm",
"dateFormatItem-Hm": "H:mm",
"dateFormatItem-hms": "a h:mm:ss",
"dateFormatItem-Hms": "H:mm:ss",
"dateFormatItem-Md": "M. d.",
"dateFormatItem-MEd": "M. d., E",
"dateFormatItem-MMMd": "MMM d.",
"dateFormatItem-MMMEd": "MMM d., E",
"dateFormatItem-yM": "y.M.",
"dateFormatItem-yMd": "yyyy.MM.dd.",
"dateFormatItem-yMEd": "yyyy.MM.dd., E",
"dateFormatItem-yMMM": "y. MMM",
"dateFormatItem-yMMMd": "y. MMM d.",
"dateFormatItem-yMMMEd": "y. MMM d., E",
"dateFormatItem-yQQQ": "y. QQQ",
"timeFormat-full": "H:mm:ss zzzz",
"timeFormat-long": "H:mm:ss z",
"timeFormat-medium": "H:mm:ss",
"timeFormat-short": "H:mm"
}
//end v1.x content
);<|fim▁end|> | { |
<|file_name|>PResource.java<|end_file_name|><|fim▁begin|><|fim▁hole|>/* This file was generated by SableCC (http://www.sablecc.org/). */
package se.sics.kola.node;
public abstract class PResource extends Node
{
// Empty body
}<|fim▁end|> | |
<|file_name|>test_requests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from threadlocals.threadlocals import set_current_user
from django.contrib.auth import get_user_model
from powerdns.models import (
Domain,
DomainRequest,
Record,
RecordRequest,
)
from .utils import (
ServiceFactory,
assert_does_exist,
assert_not_exists,
)
class TestRequests(TestCase):
"""Tests for domain/record requests"""
def setUp(self):
self.user1 = get_user_model().objects.create_user(
'user1', '[email protected]', 'password'
)<|fim▁hole|> 'user2', '[email protected]', 'password'
)
self.domain = Domain.objects.create(
name='example.com',
type='NATIVE',
owner=self.user1
)
self.record = Record.objects.create(
domain=self.domain,
name='forum.example.com',
type='CNAME',
content='phpbb.example.com',
owner=self.user1,
)
def test_subdomain_creation(self):
set_current_user(self.user1)
request = DomainRequest.objects.create(
parent_domain=self.domain,
target_name='subdomain.example.com',
target_owner=self.user1,
target_service=ServiceFactory(),
)
request.accept()
assert_does_exist(
Domain, name='subdomain.example.com', owner=self.user1
)
def test_domain_change(self):
request = DomainRequest.objects.create(
domain=self.domain,
target_name='example.com',
target_type='MASTER',
owner=self.user2,
target_owner=self.user1,
target_service=ServiceFactory(),
)
request.accept()
assert_does_exist(
Domain,
name='example.com',
type='MASTER',
owner=self.user1
)
assert_not_exists(Domain, name='example.com', type='NATIVE')
def test_record_creation(self):
request = RecordRequest.objects.create(
domain=self.domain,
target_type='CNAME',
target_name='site.example.com',
target_content='www.example.com',
owner=self.user1,
target_owner=self.user2,
)
request.accept()
assert_does_exist(
Record,
content='www.example.com',
owner=self.user2,
)
def test_record_change(self):
request = RecordRequest.objects.create(
domain=self.domain,
record=self.record,
target_type='CNAME',
target_name='forum.example.com',
target_content='djangobb.example.com',
target_owner=self.user2,
owner=self.user1,
)
request.accept()
assert_does_exist(Record, content='djangobb.example.com')
assert_not_exists(Record, content='phpbb.example.com')<|fim▁end|> | self.user2 = get_user_model().objects.create_user( |
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from flask_script import Manager
from flask_frozen import Freezer
import discovery
import logging
out = logging.StreamHandler()
out.setFormatter(logging.Formatter())
out.setLevel(logging.DEBUG)
logging.getLogger('freepto-web').setLevel(logging.INFO)
logging.getLogger('freepto-web').addHandler(out)
from app import app
manager = Manager(app)
freezer = Freezer(app)
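# Each @freezer.register_generator below yields the URL keyword arguments that
# Frozen-Flask needs to render every static page for the matching view.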
@freezer.register_generator
def index():
yield {}
@freezer.register_generator
def page_index():
for lang in discovery.lang_dirs:
yield {'lang': lang}
@freezer.register_generator
def page():
for lang in discovery.lang_dirs:
for title in discovery.find_pages(lang):
yield {'lang': lang, 'title': title}
@manager.command
def freeze():<|fim▁hole|>if __name__ == "__main__":
manager.run()<|fim▁end|> | freezer.freeze()
|
<|file_name|>evacuation.py<|end_file_name|><|fim▁begin|># python3
import queue
class Edge:
def __init__(self, u, v, capacity):
self.u = u
self.v = v
self.capacity = capacity
self.flow = 0
# This class implements a slightly unusual scheme for storing edges of the graph,
# in order to retrieve the backward edge for a given edge quickly.
class FlowGraph:
def __init__(self, n):
# List of all - forward and backward - edges
self.edges = []
# These adjacency lists store only indices of edges in the edges list
self.graph = [[] for _ in range(n)]
def add_edge(self, from_, to, capacity):
# Note that we first append a forward edge and then a backward edge,
# so all forward edges are stored at even indices (starting from 0),
# whereas backward edges are stored at odd indices.
forward_edge = Edge(from_, to, capacity)
backward_edge = Edge(to, from_, 0)
self.graph[from_].append(len(self.edges))
self.edges.append(forward_edge)
self.graph[to].append(len(self.edges))
self.edges.append(backward_edge)
def size(self):
return len(self.graph)
def get_ids(self, from_):
return self.graph[from_]
def get_edge(self, id):
return self.edges[id]
def add_flow(self, id, flow):
# To get a backward edge for a true forward edge (i.e id is even), we should get id + 1
# due to the described above scheme. On the other hand, when we have to get a "backward"
# edge for a backward edge (i.e. get a forward edge for backward - id is odd), id - 1
# should be taken.
#
# It turns out that id ^ 1 works for both cases. Think this through!
self.edges[id].flow += flow
self.edges[id ^ 1].flow -= flow
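        # Worked example with hypothetical indices: forward edge 4 pairs with
        # backward edge 5 (4 ^ 1 == 5, 5 ^ 1 == 4), and backward edge 3 pairs
        # with forward edge 2 (3 ^ 1 == 2).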
def read_data():
vertex_count, edge_count = map(int, input().split())
graph = FlowGraph(vertex_count)<|fim▁hole|> return graph
def BFS(graph, s):
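    # Breadth-first search over edges with remaining residual capacity; dist
    # doubles as the "visited" marker and path_edge_ids records, per vertex,
    # the id of the edge used to reach it.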
dist = [-1] * graph.size()
path_edge_ids = [None] * graph.size()
dist[s] = 0
q = queue.Queue()
q.put(s)
while not q.empty():
u = q.get()
edge_ids = graph.graph[u]
for edge, edge_id in [(graph.get_edge(e_id), e_id) for e_id in edge_ids]:
if dist[edge.v] == -1 and (edge.capacity - edge.flow) > 0:
q.put(edge.v)
dist[edge.v] = dist[u] + 1
path_edge_ids[edge.v] = edge_id
return dist, path_edge_ids
def ReconstructPath(s, u, path_edge_ids, graph):
result = []
while u != s:
e_to_u_id = path_edge_ids[u]
result.append(e_to_u_id)
u = graph.get_edge(e_to_u_id).u
return result
def max_flow(graph, from_, to):
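    # Edmonds-Karp: repeatedly augment along a shortest (fewest-edges) residual
    # path found by BFS, pushing the bottleneck capacity X each iteration.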
flow = 0
while True:
(dist, path_edge_ids) = BFS(graph, from_)
if path_edge_ids[to] is None:
return flow
path_to_sink_edge_ids = ReconstructPath(from_, to, path_edge_ids, graph)
X = min([(graph.get_edge(e_id).capacity - graph.get_edge(e_id).flow) for e_id in path_to_sink_edge_ids])
for e_id in path_to_sink_edge_ids:
graph.add_flow(e_id, X)
flow += X
if __name__ == "__main__":
graph = read_data()
print(max_flow(graph, 0, graph.size() - 1))<|fim▁end|> | for _ in range(edge_count):
u, v, capacity = map(int, input().split())
graph.add_edge(u - 1, v - 1, capacity) |
<|file_name|>probe_test.go<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package probe_test
import (
"os"
"testing"
"github.com/minio/minio/pkg/probe"
. "gopkg.in/check.v1"
)
func Test(t *testing.T) { TestingT(t) }
type MySuite struct{}
var _ = Suite(&MySuite{})
func testDummy0() *probe.Error {
_, e := os.Stat("this-file-cannot-exit")
return probe.NewError(e)
}
func testDummy1() *probe.Error {
return testDummy0().Trace("DummyTag1")
}
func testDummy2() *probe.Error {
return testDummy1().Trace("DummyTag2")
}
func (s *MySuite) TestProbe(c *C) {
es := testDummy2().Trace("TopOfStack")
// Uncomment the following Println to visually test probe call trace.
// fmt.Println("Expecting a simulated error here.", es)
c.Assert(es, Not(Equals), nil)
newES := es.Trace()
c.Assert(newES, Not(Equals), nil)
}
func (s *MySuite) TestWrappedError(c *C) {
_, e := os.Stat("this-file-cannot-exit")
es := probe.NewError(e) // *probe.Error
e = probe.WrapError(es) // *probe.WrappedError
_, ok := probe.UnwrapError(e)
c.Assert(ok, Equals, true)
}<|fim▁end|> | * Minio Cloud Storage, (C) 2015 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>extern crate build_utils;
extern crate protoc_grpcio;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use build_utils::BuildRoot;
fn main() {
let build_root = BuildRoot::find().unwrap();
let thirdpartyprotobuf = build_root.join("3rdparty/protobuf");
println!(
"cargo:rerun-if-changed={}",
thirdpartyprotobuf.to_str().unwrap()
);
let gen_dir = PathBuf::from("src/gen");
// Re-gen if, say, someone does a git clean on the gen dir but not the target dir. This ensures
// generated sources are available for reading by programmers and tools like rustfmt alike.
println!("cargo:rerun-if-changed={}", gen_dir.to_str().unwrap());
<|fim▁hole|> protoc_grpcio::compile_grpc_protos(
&[
"google/devtools/remoteexecution/v1test/remote_execution.proto",
"google/bytestream/bytestream.proto",
"google/rpc/code.proto",
"google/rpc/error_details.proto",
"google/rpc/status.proto",
"google/longrunning/operations.proto",
"google/protobuf/empty.proto",
],
&[
thirdpartyprotobuf.join("googleapis"),
thirdpartyprotobuf.join("standard"),
thirdpartyprotobuf.join("rust-protobuf"),
],
&gen_dir,
).expect("Failed to compile protos!");
let listing = gen_dir.read_dir().unwrap();
let mut pub_mod_stmts = listing
.filter_map(|d| {
let dirent = d.unwrap();
let file_name = dirent.file_name().into_string().unwrap();
match file_name.trim_right_matches(".rs") {
"mod" | ".gitignore" => None,
module_name => Some(format!("pub mod {};", module_name)),
}
})
.collect::<Vec<_>>();
pub_mod_stmts.sort();
let contents = format!(
"\
// This file is generated. Do not edit.
{}
",
pub_mod_stmts.join("\n")
);
File::create(gen_dir.join("mod.rs"))
.and_then(|mut f| f.write_all(contents.as_bytes()))
.expect("Failed to write mod.rs")
}<|fim▁end|> | |
<|file_name|>filecredsource.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package externalaccount
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"io/ioutil"
"os"
)
type fileCredentialSource struct {
File string
Format format
}
func (cs fileCredentialSource) subjectToken() (string, error) {
tokenFile, err := os.Open(cs.File)
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to open credential file %q", cs.File)
}
defer tokenFile.Close()
tokenBytes, err := ioutil.ReadAll(io.LimitReader(tokenFile, 1<<20))
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to read credential file: %v", err)
}
tokenBytes = bytes.TrimSpace(tokenBytes)
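	// Interpret the file according to the configured format: for "json" the
	// subject token is a single field of a JSON object (e.g. a hypothetical
	// {"token": "..."} with SubjectTokenFieldName == "token"); for "text" or
	// an empty format the whole trimmed file is the token.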
switch cs.Format.Type {
case "json":
jsonData := make(map[string]interface{})
err = json.Unmarshal(tokenBytes, &jsonData)
if err != nil {
return "", fmt.Errorf("oauth2/google: failed to unmarshal subject token file: %v", err)
}
val, ok := jsonData[cs.Format.SubjectTokenFieldName]
if !ok {
return "", errors.New("oauth2/google: provided subject_token_field_name not found in credentials")
}
token, ok := val.(string)
if !ok {
return "", errors.New("oauth2/google: improperly formatted subject token")
}
return token, nil
case "text":
return string(tokenBytes), nil
case "":
return string(tokenBytes), nil
default:
return "", errors.New("oauth2/google: invalid credential_source file format type")
}
}<|fim▁end|> | // Copyright 2020 The Go Authors. All rights reserved. |
<|file_name|>FanMapMaker.java<|end_file_name|><|fim▁begin|>import java.util.List;
import java.util.LinkedList;
import java.util.ArrayList;
import java.util.Arrays;
import java.io.File;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.Scanner;
import java.io.IOException;
import java.io.FileNotFoundException;
import java.lang.NumberFormatException;
import java.util.Collections;
public class FanMapMaker {
private static class IntList extends ArrayList<Integer> {}
private void printChargesArray(int[] distribution, int size) {
int percent;
System.out.println("temp2tcharge:");
for(int i = 0; i <= size; i++) {
// retlw .248 ;31 PWM=97%
percent = (100*i)/60;
System.out.println("\tretlw\t\t." + distribution[percent] + "\t\t; " + i + " " + percent + "%");
}
}
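	// Emits the lookup table as a chain of XORLW/BTFSC/MOVLW triples: each
	// entry XORs W with the step value and, when the zero flag signals a
	// match, loads the corresponding PWM charge before jumping to the end label.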
private void printChargesArrayStatAll(int[] distribution, int size) {
int percent;
System.out.println("temp2tcharge_tab:");
for(int i = 0; i <= size; i++) {
percent = (100*i)/size;
System.out.println("\XORLW\t\t."+i+"\n"+
"\tBTFSC\t\tSTATUS,Z\n"+
"\tMOVLW\t\t."+distribution[percent]+"\t\t; [" + i + "] " + percent + "%\n"<|fim▁hole|> System.out.println("\tGOTO\t\ttemp2tcharge_tab_end");
}
private void printChargesArrayStatic(int[] distribution, int size) {
int percent;
String tmpRegName = "TMP0";
String endLabel = "temp2tcharge_tab_end";
System.out.println("temp2tcharge_tab:");
System.out.println("\tMOVLW\t\t.255\n\tMOVF\t\t"+tmpRegName+",F\n\tBTFSS\t\tSTATUS,Z\n\tGOTO\t\tnext1\n\tGOTO\t\t"+endLabel);
for(int i = 1; i <= size; i++) {
// retlw .248 ;31 PWM=97%
percent = (100*i)/60;
//System.out.println("\tretlw\t\t." + distribution[percent] + "\t\t; " + i + " " + percent + "%");
System.out.println("next"+i+":\n\tMOVLW\t\t."+ distribution[percent]+"\t\t; [" + i + "] " +
percent + "%\n\tDECFSZ\t\t"+tmpRegName+",F\n\tBTFSS\t\tSTATUS,Z\n\tGOTO\t\t"+
((i<size) ? "next"+(i+1) : endLabel) + "\n\tGOTO\t\t"+endLabel);
}
}
public static void main(String[] a) {
FanMapMaker fmp = new FanMapMaker();
IntList percentToCharge[] = new IntList[101];
int res[] = new int[101];
for(int i = 0; i < percentToCharge.length; i++)
percentToCharge[i] = new IntList();
File decFile = new File("allDec.dat");
File incFile = new File("allInc.dat");
BufferedReader decReader = null;
BufferedReader incReader = null;
Integer tchrg;
Integer fanPercx;
Float sum;
Float fanPerc;
try {
//decReader = new BufferedReader(new FileReader(decFile));
//incReader = new BufferedReader(new FileReader(incFile));
Scanner decScan = new Scanner(decFile);
Scanner incScan = new Scanner(incFile);
int cnt = 0;
while (decScan.hasNext()) {
tchrg = decScan.nextInt();
fanPerc = 0.0f;
for(int i = 0; i < 3; i++) {
fanPerc += decScan.nextFloat();
}
fanPerc /= 3.0f;
fanPercx = Math.round(fanPerc);
percentToCharge[fanPercx].add(tchrg); //new Float(decScan.nextFloat())
//System.out.println(tchrg + " " + fanPercx);
}
while (incScan.hasNext()) {
tchrg = incScan.nextInt();
fanPerc = 0.0f;
for(int i = 0; i < 3; i++) {
fanPerc += incScan.nextFloat();
}
fanPerc /= 3.0f;
fanPercx = Math.round(fanPerc);
percentToCharge[fanPercx].add(tchrg); //new Float(decScan.nextFloat())
//System.out.println(tchrg + " " + fanPercx);
}
for (int i = 0; i < percentToCharge.length; i++) {
Collections.sort(percentToCharge[i]);
//System.out.print("" + i + " " + "[");
for(Integer e: percentToCharge[i]) {
//System.out.print(e + " ");
}
//System.out.println("]");
}
int last = 1;
for (int i = percentToCharge.length - 1; i >= 0; i--) {
if(percentToCharge[i].size() > 0) {
res[i] = percentToCharge[i].get(0);
last = res[i];
} else {
res[i] = last;
}
//System.out.println(i + " " + res[i]);
}
fmp.printChargesArrayStatAll(res, 50);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (NumberFormatException e) {
e.printStackTrace();
} finally {
/*try {
if (decReader != null) {
//decReader.close();
}
} //catch (IOException e) {
//}
try {
if (incReader != null) {
//incReader.close();
}
} //catch (IOException e) {
//}
*/
}
}
}<|fim▁end|> | );
} |
<|file_name|>junk-tests.ts<|end_file_name|><|fim▁begin|>/// <reference types="node" />
<|fim▁hole|>
junk.is('foo'); // $ExpectType boolean
junk.not('foo'); // $ExpectType boolean
junk.regex; // $ExpectType RegExp
fs.readdir('some/path', (err, files) => {
files.filter(junk.not);
});<|fim▁end|> | import fs = require('fs');
import * as junk from 'junk'; |
<|file_name|>opcodes.rs<|end_file_name|><|fim▁begin|>use {Node, ValueInfo};
#[derive(Copy,Clone,Debug,PartialEq,Eq)]
pub enum OpCode
{
Add,
Sub,
Mul,
Div,
Shl,
Shr,
Ret,<|fim▁hole|> /// Zero extension.
    /// `(zext 16 %value)`
Zext,
/// Set a register.
/// `(set %reg, %value)`
Set,
}
impl OpCode
{
pub fn mnemonic(&self) -> String {
format!("{:?}", self).to_lowercase()
}
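    /// Describes how each operand is used: `set` writes its first operand and
    /// reads its second; every other opcode only reads its operands.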
pub fn value_infos(&self, operands: &[Node]) -> Vec<ValueInfo> {
match *self {
OpCode::Set => {
assert_eq!(operands.len(), 2);
vec![ValueInfo::Output, ValueInfo::Input]
},
// Everything else is all-inputs.
_ => {
operands.iter().map(|_| ValueInfo::Input).collect()
},
}
}
}<|fim▁end|> | /// Sign extension.
/// `(sext 16 %value)`
Sext, |
<|file_name|>edgeop_lsys.rs<|end_file_name|><|fim▁begin|>extern crate rand;
extern crate evo;
extern crate petgraph;
#[macro_use]
extern crate graph_annealing;
extern crate pcg;
extern crate triadic_census;
extern crate lindenmayer_system;
extern crate graph_edge_evolution;
extern crate asexp;
extern crate expression;
extern crate expression_num;
extern crate expression_closed01;
extern crate matplotlib;
extern crate closed01;
extern crate graph_io_gml;
extern crate nsga2;
#[path="genome/genome_edgeop_lsys.rs"]
pub mod genome;
use std::str::FromStr;
use rand::{Rng, SeedableRng};
use rand::os::OsRng;
use pcg::PcgRng;
use evo::Probability;
use nsga2::{Driver, DriverConfig};
use genome::{Genome, Toolbox};
use graph_annealing::helper::to_weighted_vec;
use graph_annealing::goal::{FitnessFunction, Goal};
use graph_annealing::graph;
pub use graph_annealing::UniformDistribution;
use graph_annealing::stat::Stat;
use petgraph::{Directed, EdgeDirection, Graph};
use triadic_census::OptDenseDigraph;
use std::fs::File;
use genome::{RuleMutOp, RuleProductionMutOp, VarOp};
use genome::edgeop::{EdgeOp, edgeops_to_graph};
use genome::expr_op::{FlatExprOp, RecursiveExprOp, EXPR_NAME};
use std::io::Read;
use asexp::Sexp;
use asexp::sexp::pp;
use std::env;
use std::collections::BTreeMap;
use std::fmt::Debug;
use matplotlib::{Env, Plot};
struct ReseedRecorder {
reseeds: Vec<(u64, u64)>
}
/*
impl Reseeder<pcg::RcgRng> for ReseedRecorder {
fn reseed(&mut self, rng: &mut pcg::RcgRng) {
let mut r = rand::thread_rng();
let s1 = r.next_u64();
let s2 = r.next_u64();
self.reseeds.push((s1, s2));
rng.reseed([s1, s2]);
}
}
*/
const MAX_OBJECTIVES: usize = 3;
fn graph_to_sexp<N, E, F, G>(g: &Graph<N, E, Directed>,
node_weight_map: F,
edge_weight_map: G)
-> Sexp
where F: Fn(&N) -> Option<Sexp>,
G: Fn(&E) -> Option<Sexp>
{
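    // One s-expression map per node, carrying its id, the list of outgoing
    // edges (each optionally weighted), and an optional node weight.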
let mut nodes = Vec::new();
for node_idx in g.node_indices() {
let edges: Vec<_> = g.edges_directed(node_idx, EdgeDirection::Outgoing)
.map(|(target_node, edge_weight)| {
match edge_weight_map(edge_weight) {
Some(w) => Sexp::from((target_node.index(), w)),
None => Sexp::from(target_node.index()),
}
})
.collect();
let mut def = vec![
(Sexp::from("id"), Sexp::from(node_idx.index())),
(Sexp::from("edges"), Sexp::Array(edges)),
];
match node_weight_map(&g[node_idx]) {
Some(w) => def.push((Sexp::from("weight"), w)),
None => {}
}
nodes.push(Sexp::Map(def));
}
Sexp::Map(vec![
(Sexp::from("version"), Sexp::from(1usize)),
(Sexp::from("nodes"), Sexp::Array(nodes)),
])
}
#[derive(Debug)]
struct ConfigGenome {
max_iter: usize,
rules: usize,
initial_len: usize,
symbol_arity: usize,
num_params: usize,
prob_terminal: Probability,
}
#[derive(Debug)]
struct Config {
ngen: usize,
mu: usize,
lambda: usize,
k: usize,
seed: Vec<u64>,
objectives: Vec<Objective>,
graph: Graph<f32, f32, Directed>,
edge_ops: Vec<(EdgeOp, u32)>,
var_ops: Vec<(VarOp, u32)>,
rule_mut_ops: Vec<(RuleMutOp, u32)>,
rule_prod_ops: Vec<(RuleProductionMutOp, u32)>,
flat_expr_op: Vec<(FlatExprOp, u32)>,
recursive_expr_op: Vec<(RecursiveExprOp, u32)>,
genome: ConfigGenome,
plot: bool,
weight: f64,
}
#[derive(Debug)]
struct Objective {
fitness_function: FitnessFunction,
threshold: f32,
}
fn parse_ops<T, I>(map: &BTreeMap<String, Sexp>, key: &str) -> Vec<(T, u32)>
where T: FromStr<Err = I> + UniformDistribution,
I: Debug
{
if let Some(&Sexp::Map(ref list)) = map.get(key) {
let mut ops: Vec<(T, u32)> = Vec::new();
for &(ref k, ref v) in list.iter() {
ops.push((T::from_str(k.get_str().unwrap()).unwrap(),
v.get_uint().unwrap() as u32));
}
ops
} else {
T::uniform_distribution()
}
}
fn convert_weight(w: Option<&Sexp>) -> Option<f32> {
match w {
Some(s) => s.get_float().map(|f| f as f32),
None => {
// use a default
Some(0.0)
}
}
}
fn parse_config(sexp: Sexp) -> Config {
let map = sexp.into_map().unwrap();
// number of generations
let ngen: usize = map.get("ngen").and_then(|v| v.get_uint()).unwrap() as usize;
// size of population
let mu: usize = map.get("mu").and_then(|v| v.get_uint()).unwrap() as usize;
// size of offspring population
let lambda: usize = map.get("lambda").and_then(|v| v.get_uint()).unwrap() as usize;
// tournament selection
let k: usize = map.get("k").and_then(|v| v.get_uint()).unwrap_or(2) as usize;
assert!(k > 0);
let plot: bool = map.get("plot").map(|v| v.get_str() == Some("true")).unwrap_or(false);
let weight: f64 = map.get("weight").and_then(|v| v.get_float()).unwrap_or(1.0);
let seed: Vec<u64>;
if let Some(seed_expr) = map.get("seed") {
seed = seed_expr.get_uint_vec().unwrap();
} else {
println!("Use OsRng to generate seed..");
let mut rng = OsRng::new().unwrap();
seed = (0..2).map(|_| rng.next_u64()).collect();
}
// Parse objectives and thresholds
let mut objectives: Vec<Objective> = Vec::new();
if let Some(&Sexp::Map(ref list)) = map.get("objectives") {
for &(ref k, ref v) in list.iter() {
objectives.push(Objective {
fitness_function: FitnessFunction::from_str(k.get_str().unwrap()).unwrap(),
threshold: v.get_float().unwrap() as f32,
});
}
} else {
panic!("Map expected");
}
if objectives.len() > MAX_OBJECTIVES {
panic!("Max {} objectives allowed", MAX_OBJECTIVES);
}
// read graph
let graph_file = map.get("graph").unwrap().get_str().unwrap();
println!("Using graph file: {}", graph_file);
let graph_s = {
let mut graph_file = File::open(graph_file).unwrap();
let mut graph_s = String::new();
let _ = graph_file.read_to_string(&mut graph_s).unwrap();
graph_s
};
let graph = graph_io_gml::parse_gml(&graph_s,
&convert_weight,
&convert_weight)
.unwrap();
println!("graph: {:?}", graph);
let graph = graph::normalize_graph(&graph);
let genome_map = map.get("genome").unwrap().clone().into_map().unwrap();
Config {
ngen: ngen,
mu: mu,
lambda: lambda,
k: k,
plot: plot,
weight: weight,
seed: seed,
objectives: objectives,
graph: graph,
edge_ops: parse_ops(&map, "edge_ops"),
var_ops: parse_ops(&map, "var_ops"),
rule_mut_ops: parse_ops(&map, "rule_mut_ops"),
rule_prod_ops: parse_ops(&map, "rule_prod_mut_ops"),
flat_expr_op: parse_ops(&map, "flat_expr_ops"),
recursive_expr_op: parse_ops(&map, "recursive_expr_ops"),
genome: ConfigGenome {
rules: genome_map.get("rules").and_then(|v| v.get_uint()).unwrap() as usize,
symbol_arity: genome_map.get("symbol_arity").and_then(|v| v.get_uint()).unwrap() as usize,
num_params: genome_map.get("num_params").and_then(|v| v.get_uint()).unwrap() as usize,
initial_len: genome_map.get("initial_len").and_then(|v| v.get_uint()).unwrap() as usize,
max_iter: genome_map.get("max_iter").and_then(|v| v.get_uint()).unwrap() as usize,
prob_terminal: Probability::new(genome_map.get("prob_terminal").and_then(|v| v.get_float()).unwrap() as f32),
},
}
}
fn main() {
println!("Using expr system: {}", EXPR_NAME);
let env = Env::new();
let plot = Plot::new(&env);
let mut s = String::new();
let configfile = env::args().nth(1).unwrap();
let _ = File::open(configfile).unwrap().read_to_string(&mut s).unwrap();
let expr = asexp::Sexp::parse_toplevel(&s).unwrap();
let config = parse_config(expr);
println!("{:#?}", config);
if config.plot {
plot.interactive();
plot.show();
}
let num_objectives = config.objectives.len();
let driver_config = DriverConfig {
mu: config.mu,
lambda: config.lambda,
k: config.k,
ngen: config.ngen,
num_objectives: num_objectives
};
let toolbox = Toolbox::new(Goal::new(OptDenseDigraph::from(config.graph.clone())),
config.objectives
.iter()
.map(|o| o.threshold)
.collect(),
config.objectives
.iter()
.map(|o| o.fitness_function.clone())
.collect(),
config.genome.max_iter, // iterations
config.genome.rules, // num_rules
config.genome.initial_len, // initial rule length
config.genome.symbol_arity, // we use 1-ary symbols
config.genome.num_params,
config.genome.prob_terminal,
to_weighted_vec(&config.edge_ops),
to_weighted_vec(&config.flat_expr_op),
to_weighted_vec(&config.recursive_expr_op),
to_weighted_vec(&config.var_ops),
to_weighted_vec(&config.rule_mut_ops),
to_weighted_vec(&config.rule_prod_ops));
assert!(config.seed.len() == 2);
let mut rng: PcgRng = SeedableRng::from_seed([config.seed[0], config.seed[1]]);
//let mut rng = rand::thread_rng();
let selected_population = toolbox.run(&mut rng, &driver_config, config.weight, &|iteration, duration, num_optima, population| {
let duration_ms = (duration as f32) / 1_000_000.0;
print!("# {:>6}", iteration);
let fitness_values = population.fitness_to_vec();
// XXX: Assume we have at least two objectives
let mut x = Vec::new();
let mut y = Vec::new();
for f in fitness_values.iter() {
x.push(f.objectives[0]);
y.push(f.objectives[1]);
}
if config.plot {
plot.clf();
plot.title(&format!("Iteration: {}", iteration));
plot.grid(true);
plot.scatter(&x, &y);
plot.draw();
}
// calculate a min/max/avg value for each objective.
let stats: Vec<Stat<f32>> = (0..num_objectives)
.into_iter()
.map(|i| {
Stat::from_iter(fitness_values.iter().map(|o| o.objectives[i]))
.unwrap()
})
.collect();
for stat in stats.iter() {
print!(" | ");
print!("{:>8.2}", stat.min);
print!("{:>9.2}", stat.avg);
print!("{:>10.2}", stat.max);
}
print!(" | {:>5} | {:>8.0} ms", num_optima, duration_ms);
println!("");
if num_optima > 0 {
println!("Found premature optimum in Iteration {}", iteration);
}
});
println!("===========================================================");
let mut best_solutions: Vec<(Genome, _)> = Vec::new();
selected_population.all_of_rank(0, &mut |ind, fit| {
if fit.objectives[0] < 0.1 && fit.objectives[1] < 0.1 {
best_solutions.push((ind.clone(), fit.clone()));
}
});
println!("Target graph");
let sexp = graph_to_sexp(&graph::normalize_graph_closed01(&config.graph),
|nw| Some(Sexp::from(nw.get())),
|ew| Some(Sexp::from(ew.get())));
println!("{}", pp(&sexp));
let mut solutions: Vec<Sexp> = Vec::new();
for (_i, &(ref ind, ref fitness)) in best_solutions.iter().enumerate() {
let genome: Sexp = ind.into();
let edge_ops = ind.to_edge_ops(&toolbox.axiom_args, toolbox.iterations);
let g = edgeops_to_graph(&edge_ops);
// output as sexp
let graph_sexp = graph_to_sexp(g.ref_graph(),
|&nw| Some(Sexp::from(nw)),
|&ew| Some(Sexp::from(ew)));
solutions.push(Sexp::Map(
vec![
(Sexp::from("fitness"), Sexp::from((fitness.objectives[0], fitness.objectives[1], fitness.objectives[2]))),
(Sexp::from("genome"), genome),
(Sexp::from("graph"), graph_sexp),
]
));
/*
draw_graph(g.ref_graph(),
// XXX: name
&format!("edgeop_lsys_g{}_f{}_i{}.svg",
config.ngen,
fitness.objectives[1] as usize,
i));
*/
}
<|fim▁hole|>
println!("{:#?}", config);
}<|fim▁end|> | println!("{}", pp(&Sexp::from(("solutions", Sexp::Array(solutions)))));
//println!("])"); |
<|file_name|>ServiceRepeatCreateException.java<|end_file_name|><|fim▁begin|>package com.xiaojinzi.component.error;
public class ServiceRepeatCreateException extends RuntimeException {
public ServiceRepeatCreateException() {
}
public ServiceRepeatCreateException(String message) {
super(message);<|fim▁hole|>
public ServiceRepeatCreateException(String message, Throwable cause) {
super(message, cause);
}
public ServiceRepeatCreateException(Throwable cause) {
super(cause);
}
}<|fim▁end|> | } |
<|file_name|>RequestDB.py<|end_file_name|><|fim▁begin|>########################################################################
# $HeadURL $
# File: RequestDB.py
# Author: [email protected]
# Date: 2012/12/04 08:06:30
########################################################################
""" :mod: RequestDB
=======================
.. module: RequestDB
:synopsis: db holding Requests
.. moduleauthor:: [email protected]
db holding Request, Operation and File
"""
__RCSID__ = "$Id $"
# #
# @file RequestDB.py
# @author [email protected]
# @date 2012/12/04 08:06:51
# @brief Definition of RequestDB class.
# # imports
import random
import threading
# Get rid of the annoying Deprecation warning of the current MySQLdb
# FIXME: compile a newer MySQLdb version
import warnings
with warnings.catch_warnings():
warnings.simplefilter( 'ignore', DeprecationWarning )
import MySQLdb.cursors
from MySQLdb import Error as MySQLdbError
# # from DIRAC
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.Base.DB import DB
from DIRAC.Core.Utilities.List import stringListToString
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.Client.Operation import Operation
from DIRAC.RequestManagementSystem.Client.File import File
########################################################################
class RequestDB( DB ):
"""
.. class:: RequestDB
db holding requests
"""
def __init__( self, systemInstance = 'Default', maxQueueSize = 10 ):
"""c'tor
:param self: self reference
"""
self.getIdLock = threading.Lock()
DB.__init__( self, "ReqDB", "RequestManagement/ReqDB", maxQueueSize )
def createTables( self, toCreate = None, force = False ):
""" create tables """
toCreate = toCreate if toCreate else []
if not toCreate:
return S_OK()
tableMeta = self.getTableMeta()
metaCreate = {}
for tableName in toCreate:
metaCreate[tableName] = tableMeta[tableName]
if metaCreate:
return self._createTables( metaCreate, force )
return S_OK()
@staticmethod
def getTableMeta():
""" get db schema in a dict format """
return dict( [ ( classDef.__name__, classDef.tableDesc() )
for classDef in ( Request, Operation, File ) ] )
def getTables( self ):
""" get tables """
showTables = self._query( "SHOW TABLES;" )
if not showTables["OK"]:
return showTables
return S_OK( [ table[0] for table in showTables["Value"] if table ] )
def dictCursor( self, conn = None ):
""" get dict cursor for connection :conn:
:return: S_OK( { "cursor": MySQLdb.cursors.DictCursor, "connection" : connection } ) or S_ERROR
"""
if not conn:
retDict = self._getConnection()
if not retDict["OK"]:
self.log.error( retDict["Message"] )
return retDict
conn = retDict["Value"]
cursor = conn.cursor( cursorclass = MySQLdb.cursors.DictCursor )
return S_OK( ( conn, cursor ) )
def _transaction( self, queries ):
""" execute transaction """
queries = [ queries ] if type( queries ) == str else queries
# # get cursor and connection
getCursorAndConnection = self.dictCursor()
if not getCursorAndConnection["OK"]:
self.log.error( getCursorAndConnection["Message"] )
return getCursorAndConnection
connection, cursor = getCursorAndConnection["Value"]
# # this will be returned as query result
ret = { "OK" : True }
queryRes = { }
# # switch off autocommit
connection.autocommit( False )
try:
# # execute queries
for query in queries:
cursor.execute( query )
queryRes[query] = list( cursor.fetchall() )
# # commit
connection.commit()
# # save last row ID
lastrowid = cursor.lastrowid
# # close cursor
cursor.close()
ret["Value"] = queryRes
ret["lastrowid"] = lastrowid
connection.autocommit( True )
return ret
except MySQLdbError, error:
self.log.exception( error )
# # rollback
connection.rollback()
      # # revert autocommit
connection.autocommit( True )
# # close cursor
cursor.close()
return S_ERROR( str( error ) )
def putRequest( self, request ):
""" update or insert request into db
:param Request request: Request instance
"""
query = "SELECT `RequestID` from `Request` WHERE `RequestName` = '%s'" % request.RequestName
exists = self._transaction( query )
if not exists["OK"]:
self.log.error( "putRequest: %s" % exists["Message"] )
return exists
exists = exists["Value"]
if exists[query] and exists[query][0]["RequestID"] != request.RequestID:
return S_ERROR( "putRequest: request '%s' already exists in the db (RequestID=%s)"\
% ( request.RequestName, exists[query][0]["RequestID"] ) )
reqSQL = request.toSQL()
if not reqSQL["OK"]:
return reqSQL
reqSQL = reqSQL["Value"]
putRequest = self._transaction( reqSQL )
if not putRequest["OK"]:
self.log.error( "putRequest: %s" % putRequest["Message"] )
return putRequest
lastrowid = putRequest["lastrowid"]
putRequest = putRequest["Value"]
cleanUp = request.cleanUpSQL()
if cleanUp:
dirty = self._transaction( cleanUp )
if not dirty["OK"]:
self.log.error( "putRequest: unable to delete dirty Operation records: %s" % dirty["Message"] )
return dirty
# # flag for a new request
isNew = False
# # set RequestID when necessary
if request.RequestID == 0:
isNew = True
request.RequestID = lastrowid
for operation in request:
cleanUp = operation.cleanUpSQL()
if cleanUp:
dirty = self._transaction( [ cleanUp ] )
if not dirty["OK"]:
self.log.error( "putRequest: unable to delete dirty File records: %s" % dirty["Message"] )
return dirty
opSQL = operation.toSQL()["Value"]
putOperation = self._transaction( opSQL )
if not putOperation["OK"]:
self.log.error( "putRequest: unable to put operation %d: %s" % ( request.indexOf( operation ),
putOperation["Message"] ) )
if isNew:
deleteRequest = self.deleteRequest( request.RequestName )
if not deleteRequest["OK"]:
self.log.error( "putRequest: unable to delete request '%s': %s"\
% ( request.RequestName, deleteRequest["Message"] ) )
return deleteRequest
return putOperation
lastrowid = putOperation["lastrowid"]
putOperation = putOperation["Value"]
if operation.OperationID == 0:
operation.OperationID = lastrowid
filesToSQL = [ opFile.toSQL()["Value"] for opFile in operation ]
if filesToSQL:
putFiles = self._transaction( filesToSQL )
if not putFiles["OK"]:
self.log.error( "putRequest: unable to put files for operation %d: %s" % ( request.indexOf( operation ),
putFiles["Message"] ) )
if isNew:
deleteRequest = self.deleteRequest( request.requestName )
if not deleteRequest["OK"]:
self.log.error( "putRequest: unable to delete request '%s': %s"\
% ( request.RequestName, deleteRequest["Message"] ) )
return deleteRequest
return putFiles
return S_OK( request.RequestID )
def getScheduledRequest( self, operationID ):
""" read scheduled request given its FTS operationID """
query = "SELECT `Request`.`RequestName` FROM `Request` JOIN `Operation` ON "\
"`Request`.`RequestID`=`Operation`.`RequestID` WHERE `OperationID` = %s;" % operationID
requestName = self._query( query )
if not requestName["OK"]:
self.log.error( "getScheduledRequest: %s" % requestName["Message"] )
return requestName
requestName = requestName["Value"]
if not requestName:
return S_OK()
return self.getRequest( requestName[0][0] )
def getRequestName( self, requestID ):
""" get Request.RequestName for a given Request.RequestID """
query = "SELECT `RequestName` FROM `Request` WHERE `RequestID` = %s" % requestID
query = self._query( query )
if not query["OK"]:
self.log.error( "getRequestName: %s" % query["Message"] )
query = query["Value"]
if not query:
return S_ERROR( "getRequestName: no request found for RequestID=%s" % requestID )
return S_OK( query[0][0] )
def getRequest( self, requestName = '', assigned = True ):
""" read request for execution
:param str requestName: request's name (default None)
"""
requestID = None
if requestName:
self.log.info( "getRequest: selecting request '%s'" % requestName )
reqIDQuery = "SELECT `RequestID`, `Status` FROM `Request` WHERE `RequestName` = '%s';" % str( requestName )
reqID = self._transaction( reqIDQuery )
if not reqID["OK"]:
self.log.error( "getRequest: %s" % reqID["Message"] )
return reqID
requestID = reqID["Value"][reqIDQuery][0]["RequestID"] if "RequestID" in reqID["Value"][reqIDQuery][0] else None
status = reqID["Value"][reqIDQuery][0]["Status"] if "Status" in reqID["Value"][reqIDQuery][0] else None
if not all( ( requestID, status ) ):
return S_ERROR( "getRequest: request '%s' not exists" % requestName )
if requestID and status and status == "Assigned" and assigned:
return S_ERROR( "getRequest: status of request '%s' is 'Assigned', request cannot be selected" % requestName )
else:
reqIDsQuery = "SELECT `RequestID` FROM `Request` WHERE `Status` = 'Waiting' ORDER BY `LastUpdate` ASC LIMIT 100;"
reqIDs = self._transaction( reqIDsQuery )
if not reqIDs["OK"]:
self.log.error( "getRequest: %s" % reqIDs["Message"] )
return reqIDs
reqIDs = reqIDs["Value"][reqIDsQuery]
reqIDs = [ reqID["RequestID"] for reqID in reqIDs ]
if not reqIDs:
return S_OK()
random.shuffle( reqIDs )
requestID = reqIDs[0]
selectQuery = [ "SELECT * FROM `Request` WHERE `RequestID` = %s;" % requestID,
"SELECT * FROM `Operation` WHERE `RequestID` = %s;" % requestID ]
selectReq = self._transaction( selectQuery )
if not selectReq["OK"]:
self.log.error( "getRequest: %s" % selectReq["Message"] )
return S_ERROR( selectReq["Message"] )
selectReq = selectReq["Value"]
request = Request( selectReq[selectQuery[0]][0] )
for records in sorted( selectReq[selectQuery[1]], key = lambda k: k["Order"] ):
# # order is ro, remove
del records["Order"]
operation = Operation( records )
getFilesQuery = "SELECT * FROM `File` WHERE `OperationID` = %s;" % operation.OperationID
getFiles = self._transaction( getFilesQuery )
if not getFiles["OK"]:
self.log.error( "getRequest: %s" % getFiles["Message"] )
return getFiles
getFiles = getFiles["Value"][getFilesQuery]
for getFile in getFiles:
getFileDict = dict( [ ( key, value ) for key, value in getFile.items() if value != None ] )
operation.addFile( File( getFileDict ) )
request.addOperation( operation )
if assigned:
setAssigned = self._transaction( "UPDATE `Request` SET `Status` = 'Assigned' WHERE RequestID = %s;" % requestID )
if not setAssigned["OK"]:
self.log.error( "getRequest: %s" % setAssigned["Message"] )
return setAssigned
return S_OK( request )
def peekRequest( self, requestName ):
""" get request (ro), no update on states
:param str requestName: Request.RequestName
"""
return self.getRequest( requestName, False )
def getRequestNamesList( self, statusList = None, limit = None ):
""" select requests with status in :statusList: """
statusList = statusList if statusList else list( Request.FINAL_STATES )
limit = limit if limit else 100
query = "SELECT `RequestName`, `Status`, `LastUpdate` FROM `Request` WHERE "\
" `Status` IN (%s) ORDER BY `LastUpdate` DESC LIMIT %s;" % ( stringListToString( statusList ), limit )
reqNamesList = self._query( query )
if not reqNamesList["OK"]:
self.log.error( "getRequestNamesList: %s" % reqNamesList["Message"] )
return reqNamesList
reqNamesList = reqNamesList["Value"]
return S_OK( [ reqName for reqName in reqNamesList] )
def deleteRequest( self, requestName ):
""" delete request given its name
:param str requestName: request.RequestName
:param mixed connection: connection to use if any
"""
requestIDs = self._transaction(
"SELECT r.RequestID, o.OperationID FROM `Request` r LEFT JOIN `Operation` o "\
"ON r.RequestID = o.RequestID WHERE `RequestName` = '%s'" % requestName )
if not requestIDs["OK"]:
self.log.error( "deleteRequest: unable to read RequestID and OperationIDs: %s" % requestIDs["Message"] )
return requestIDs
requestIDs = requestIDs["Value"]
trans = []
requestID = None
for records in requestIDs.values():
for record in records:
requestID = record["RequestID"] if record["RequestID"] else None
operationID = record["OperationID"] if record["OperationID"] else None
if operationID and requestID:
trans.append( "DELETE FROM `File` WHERE `OperationID` = %s;" % operationID )<|fim▁hole|> trans.append( "DELETE FROM `Request` WHERE `RequestID` = %s;" % requestID )
delete = self._transaction( trans )
if not delete["OK"]:
self.log.error( "deleteRequest: unable to delete request '%s': %s" % ( requestName, delete["Message"] ) )
return delete
return S_OK()
def getRequestProperties( self, requestName, columnNames ):
""" submit query """
return self._query( self._getRequestProperties( requestName, columnNames ) )
def _getRequestProperties( self, requestName, columnNames = None ):
""" select :columnNames: from Request table """
columnNames = columnNames if columnNames else Request.tableDesc()["Fields"].keys()
columnNames = ",".join( [ '`%s`' % str( columnName ) for columnName in columnNames ] )
return "SELECT %s FROM `Request` WHERE `RequestName` = '%s';" % ( columnNames, requestName )
def _getOperationProperties( self, operationID, columnNames = None ):
""" select :columnNames: from Operation table """
columnNames = columnNames if columnNames else Operation.tableDesc()["Fields"].keys()
columnNames = ",".join( [ '`%s`' % str( columnName ) for columnName in columnNames ] )
return "SELECT %s FROM `Operation` WHERE `OperationID` = %s;" % ( columnNames, int( operationID ) )
def _getFileProperties( self, fileID, columnNames = None ):
""" select :columnNames: from File table """
columnNames = columnNames if columnNames else File.tableDesc()["Fields"].keys()
columnNames = ",".join( [ '`%s`' % str( columnName ) for columnName in columnNames ] )
return "SELECT %s FROM `File` WHERE `FileID` = %s;" % ( columnNames, int( fileID ) )
def getDBSummary( self ):
""" get db summary """
# # this will be returned
retDict = { "Request" : {}, "Operation" : {}, "File" : {} }
transQueries = { "SELECT `Status`, COUNT(`Status`) FROM `Request` GROUP BY `Status`;" : "Request",
"SELECT `Type`,`Status`,COUNT(`Status`) FROM `Operation` GROUP BY `Type`,`Status`;" : "Operation",
"SELECT `Status`, COUNT(`Status`) FROM `File` GROUP BY `Status`;" : "File" }
ret = self._transaction( transQueries.keys() )
if not ret["OK"]:
self.log.error( "getDBSummary: %s" % ret["Message"] )
return ret
ret = ret["Value"]
for k, v in ret.items():
if transQueries[k] == "Request":
for aDict in v:
status = aDict.get( "Status" )
count = aDict.get( "COUNT(`Status`)" )
if status not in retDict["Request"]:
retDict["Request"][status] = 0
retDict["Request"][status] += count
elif transQueries[k] == "File":
for aDict in v:
status = aDict.get( "Status" )
count = aDict.get( "COUNT(`Status`)" )
if status not in retDict["File"]:
retDict["File"][status] = 0
retDict["File"][status] += count
else: # # operation
for aDict in v:
status = aDict.get( "Status" )
oType = aDict.get( "Type" )
count = aDict.get( "COUNT(`Status`)" )
if oType not in retDict["Operation"]:
retDict["Operation"][oType] = {}
if status not in retDict["Operation"][oType]:
retDict["Operation"][oType][status] = 0
retDict["Operation"][oType][status] += count
return S_OK( retDict )
def getRequestSummaryWeb( self, selectDict, sortList, startItem, maxItems ):
""" get db summary for web
:param dict selectDict: whatever
:param list sortList: whatever
:param int startItem: limit
:param int maxItems: limit
"""
resultDict = {}
rparameterList = [ 'RequestID', 'RequestName', 'JobID', 'OwnerDN', 'OwnerGroup']
sparameterList = [ 'Type', 'Status', 'Operation']
parameterList = rparameterList + sparameterList + [ "Error", "CreationTime", "LastUpdate"]
# parameterList.append( 'Error' )
# parameterList.append( 'CreationTime' )
# parameterList.append( 'LastUpdateTime' )
req = "SELECT R.RequestID, R.RequestName, R.JobID, R.OwnerDN, R.OwnerGroup,"
req += "O.Type, O.Status, O.Type, O.Error, O.CreationTime, O.LastUpdate FROM Requests as R, Operation as O "
new_selectDict = {}
older = None
newer = None
for key, value in selectDict.items():
if key in rparameterList:
new_selectDict['R.' + key] = value
elif key in sparameterList:
new_selectDict['O.' + key] = value
elif key == 'ToDate':
older = value
elif key == 'FromDate':
newer = value
condition = ''
if new_selectDict or older or newer:
condition = self.__buildCondition( new_selectDict, older = older, newer = newer )
req += condition
if condition:
req += " AND R.RequestID=O.RequestID"
else:
req += " WHERE R.RequestID=O.RequestID"
if sortList:
req += " ORDER BY %s %s" % ( sortList[0][0], sortList[0][1] )
result = self._query( req )
if not result['OK']:
return result
if not result['Value']:
resultDict['ParameterNames'] = parameterList
resultDict['Records'] = []
return S_OK( resultDict )
nRequests = len( result['Value'] )
if startItem <= len( result['Value'] ):
firstIndex = startItem
else:
return S_ERROR( 'Requested index out of range' )
if ( startItem + maxItems ) <= len( result['Value'] ):
secondIndex = startItem + maxItems
else:
secondIndex = len( result['Value'] )
records = []
columnWidth = [ 0 for x in range( len( parameterList ) ) ]
for i in range( firstIndex, secondIndex ):
row = result['Value'][i]
records.append( [ str( x ) for x in row] )
for ind in range( len( row ) ):
if len( str( row[ind] ) ) > columnWidth[ind]:
columnWidth[ind] = len( str( row[ind] ) )
resultDict['ParameterNames'] = parameterList
resultDict['ColumnWidths'] = columnWidth
resultDict['Records'] = records
resultDict['TotalRecords'] = nRequests
return S_OK( resultDict )
def getRequestNamesForJobs( self, jobIDs ):
""" read request names for jobs given jobIDs
:param list jobIDs: list of jobIDs
"""
self.log.debug( "getRequestForJobs: got %s jobIDs to check" % str( jobIDs ) )
if not jobIDs:
return S_ERROR( "Must provide jobID list as argument." )
if type( jobIDs ) in ( long, int ):
jobIDs = [ jobIDs ]
jobIDs = list( set( [ int( jobID ) for jobID in jobIDs ] ) )
reqDict = { "Successful": {}, "Failed": {} }
# # filter out 0
jobIDsStr = ",".join( [ str( jobID ) for jobID in jobIDs if jobID ] )
# # request names
requestNames = "SELECT `RequestName`, `JobID` FROM `Request` WHERE `JobID` IN (%s);" % jobIDsStr
requestNames = self._query( requestNames )
if not requestNames["OK"]:
self.log.error( "getRequestsForJobs: %s" % requestNames["Message"] )
return requestNames
requestNames = requestNames["Value"]
for requestName, jobID in requestNames:
reqDict["Successful"][jobID] = requestName
reqDict["Failed"] = dict.fromkeys( [ jobID for jobID in jobIDs if jobID not in reqDict["Successful"] ],
"Request not found" )
return S_OK( reqDict )
def readRequestsForJobs( self, jobIDs = None ):
""" read request for jobs
:param list jobIDs: list of JobIDs
:return: S_OK( "Successful" : { jobID1 : Request, jobID2: Request, ... }
"Failed" : { jobID3: "error message", ... } )
"""
self.log.debug( "readRequestForJobs: got %s jobIDs to check" % str( jobIDs ) )
requestNames = self.getRequestNamesForJobs( jobIDs )
if not requestNames["OK"]:
self.log.error( "readRequestForJobs: %s" % requestNames["Message"] )
return requestNames
requestNames = requestNames["Value"]
# # this will be returned
retDict = { "Failed": requestNames["Failed"], "Successful": {} }
self.log.debug( "readRequestForJobs: got %d request names" % len( requestNames["Successful"] ) )
for jobID in requestNames['Successful']:
request = self.peekRequest( requestNames['Successful'][jobID] )
if not request["OK"]:
retDict["Failed"][jobID] = request["Message"]
continue
retDict["Successful"][jobID] = request["Value"]
return S_OK( retDict )
def getRequestStatus( self, requestName ):
""" get request status for a given request name """
self.log.debug( "getRequestStatus: checking status for '%s' request" % requestName )
query = "SELECT `Status` FROM `Request` WHERE `RequestName` = '%s'" % requestName
query = self._query( query )
if not query["OK"]:
self.log.error( "getRequestStatus: %s" % query["Message"] )
return query
requestStatus = query['Value'][0][0]
return S_OK( requestStatus )
def getRequestFileStatus( self, requestName, lfnList ):
""" get status for files in request given its name
:param str requestName: Request.RequestName
:param list lfnList: list of LFNs
"""
if type( requestName ) == int:
requestName = self.getRequestName( requestName )
if not requestName["OK"]:
self.log.error( "getRequestFileStatus: %s" % requestName["Message"] )
return requestName
else:
requestName = requestName["Value"]
req = self.peekRequest( requestName )
if not req["OK"]:
self.log.error( "getRequestFileStatus: %s" % req["Message"] )
return req
req = req["Value"]
res = dict.fromkeys( lfnList, "UNKNOWN" )
for op in req:
for opFile in op:
if opFile.LFN in lfnList:
res[opFile.LFN] = opFile.Status
return S_OK( res )
def getRequestInfo( self, requestName ):
""" get request info given Request.RequestID """
if type( requestName ) == int:
requestName = self.getRequestName( requestName )
if not requestName["OK"]:
self.log.error( "getRequestInfo: %s" % requestName["Message"] )
return requestName
else:
requestName = requestName["Value"]
requestInfo = self.getRequestProperties( requestName, [ "RequestID", "Status", "RequestName", "JobID",
"OwnerDN", "OwnerGroup", "DIRACSetup", "SourceComponent",
"CreationTime", "SubmitTime", "lastUpdate" ] )
if not requestInfo["OK"]:
self.log.error( "getRequestInfo: %s" % requestInfo["Message"] )
return requestInfo
requestInfo = requestInfo["Value"][0]
return S_OK( requestInfo )
def getDigest( self, requestName ):
""" get digest for request given its name
:param str requestName: request name
"""
self.log.debug( "getDigest: will create digest for request '%s'" % requestName )
request = self.getRequest( requestName, False )
if not request["OK"]:
self.log.error( "getDigest: %s" % request["Message"] )
request = request["Value"]
if not isinstance( request, Request ):
self.log.info( "getDigest: request '%s' not found" )
return S_OK()
return request.getDigest()
@staticmethod
def __buildCondition( condDict, older = None, newer = None ):
""" build SQL condition statement from provided condDict
and other extra conditions
blindly copied from old code, hope it works
"""
condition = ''
conjunction = "WHERE"
if condDict != None:
for attrName, attrValue in condDict.items():
if type( attrValue ) == list:
multiValue = ','.join( ['"' + x.strip() + '"' for x in attrValue] )
condition = ' %s %s %s in (%s)' % ( condition,
conjunction,
str( attrName ),
multiValue )
else:
condition = ' %s %s %s=\'%s\'' % ( condition,
conjunction,
str( attrName ),
str( attrValue ) )
conjunction = "AND"
if older:
condition = ' %s %s O.LastUpdate < \'%s\'' % ( condition,
conjunction,
str( older ) )
conjunction = "AND"
if newer:
condition = ' %s %s O.LastUpdate >= \'%s\'' % ( condition,
conjunction,
str( newer ) )
return condition<|fim▁end|> | trans.append( "DELETE FROM `Operation` WHERE `RequestID` = %s AND `OperationID` = %s;" % ( requestID,
operationID ) )
# # last bit: request itself
if requestID: |
<|file_name|>Mfdlsrtm.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
'Finite difference RTM as a linear operator'
<|fim▁hole|>
# Madagascar bin directory
bindir=os.path.join(rsf.prog.RSFROOT,'bin')
# Madagascar DATAPATH
datapath=rsf.path.datapath().rstrip('/')
# Madagascar commands
cp=os.path.join(bindir,'sfcp')
rtm=os.path.join(bindir,'sfmpifdlsrtm')
# Random files for input and output
inpd,inpfile=tempfile.mkstemp(dir=datapath)
outd,outfile=tempfile.mkstemp(dir=datapath)
p=subprocess.Popen([cp],stdout=inpd, close_fds=True)
p.wait()
run='ibrun tacc_affinity %s input=%s output=%s %s' %(rtm, inpfile, outfile,' '.join(sys.argv[1:]))
print run
os.system(run)
p=subprocess.Popen([cp],stdin=outd)
p.wait()
import rsf.prog, rsf.path |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import logging<|fim▁hole|>
logger = logging.getLogger(__name__)<|fim▁end|> | import os
from gym import error |
<|file_name|>retry.rs<|end_file_name|><|fim▁begin|>// Copyright (c) The Diem Core Contributors
// SPDX-License-Identifier: Apache-2.0
use crate::async_client::{defaults, Error};
use std::time::Duration;
pub trait RetryStrategy: std::fmt::Debug + Send + Sync {
fn max_retries(&self, err: &Error) -> u32;
fn delay(&self, err: &Error, retries: u32) -> Duration;
fn is_retriable(&self, err: &Error) -> bool;
}
#[derive(Debug)]
pub struct Retry {
pub max_retries: u32,
pub delay: Duration,
}
impl Retry {
pub fn default() -> Self {
Self {
max_retries: defaults::MAX_RETRIES,
delay: defaults::WAIT_DELAY,
}
}
}
impl RetryStrategy for Retry {
fn max_retries(&self, _: &Error) -> u32 {
self.max_retries
}
fn delay(&self, _: &Error, retries: u32) -> Duration {
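        // Linear backoff: the n-th retry waits n * delay (for example, with a
        // hypothetical 500 ms base delay: 500 ms, 1 s, 1.5 s, ...).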
self.delay * retries
}
fn is_retriable(&self, err: &Error) -> bool {
match err {
Error::StaleResponseError(_) => true,
Error::NetworkError(err) => err.is_timeout() || err.is_request(),
_ => false,<|fim▁hole|> }
}
}<|fim▁end|> |