| prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>optimize_dead_defs.rs<|end_file_name|><|fim▁begin|>use std::collections::HashSet;
use grit;
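/// Removes function and object definitions that are never referenced,
/// keeping only those reachable from the program's main function.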
pub fn optimize(mut prog: grit::ProgDef) -> grit::ProgDef {
let info = collect_prog_info(&prog);
prog.fun_defs = prog.fun_defs.into_iter().filter(|def| {
info.used_funs.contains(&def.name)
}).collect();
prog.obj_defs = prog.obj_defs.into_iter().filter(|def| {
info.used_objs.contains(&def.name)
}).collect();
prog
}
struct ProgInfo {
used_funs: HashSet<grit::FunName>,
used_objs: HashSet<grit::ObjName>,
}
fn collect_prog_info(prog: &grit::ProgDef) -> ProgInfo {
let mut info = ProgInfo {
used_funs: HashSet::new(),
used_objs: HashSet::new(),
};
info.used_funs.insert(prog.main_fun.clone());
let mut collected_funs = HashSet::new();
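// Iterate to a fixpoint: collecting one used function can mark further
// functions and objects as used.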
while info.used_funs.len() > collected_funs.len() {
for fun_def in prog.fun_defs.iter() {
let is_used = info.used_funs.contains(&fun_def.name);
let is_collected = collected_funs.contains(&fun_def.name);
if is_used && !is_collected {<|fim▁hole|> collected_funs.insert(fun_def.name.clone());
}
}
}
info
}
fn collect_fun(info: &mut ProgInfo, fun_def: &grit::FunDef) {
for block in fun_def.blocks.iter() {
for op in block.ops.iter() {
collect_op(info, op);
}
collect_jump(info, &block.jump);
}
}
fn collect_op(info: &mut ProgInfo, op: &grit::Op) {
match *op {
grit::Op::Call(_, ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Op::ExternCall(_, _, ref args) =>
for arg in args.iter() {
collect_val(info, arg);
},
grit::Op::AllocClos(ref closs) =>
for &(_, ref clos_name, ref captures) in closs.iter() {
info.used_funs.insert(clos_name.clone());
for capture in captures.iter() {
collect_val(info, capture);
}
},
grit::Op::Assign(ref var_vals) =>
for &(_, ref val) in var_vals.iter() {
collect_val(info, val);
},
}
}
fn collect_jump(info: &mut ProgInfo, jump: &grit::Jump) {
match *jump {
grit::Jump::Goto(_) => {},
grit::Jump::TailCall(ref callee, ref args) => {
collect_callee(info, callee);
for arg in args.iter() {
collect_val(info, arg);
}
},
grit::Jump::Return(ref val) |
grit::Jump::Branch(grit::Boolval::IsTrue(ref val), _, _) |
grit::Jump::Branch(grit::Boolval::IsFalse(ref val), _, _) =>
collect_val(info, val),
}
}
fn collect_val(info: &mut ProgInfo, val: &grit::Val) {
match *val {
grit::Val::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Val::Obj(ref obj_name) => {
info.used_objs.insert(obj_name.clone());
},
grit::Val::Var(_) |
grit::Val::Arg(_) |
grit::Val::Capture(_) |
grit::Val::Int(_) |
grit::Val::True |
grit::Val::False |
grit::Val::Undefined => (),
}
}
fn collect_callee(info: &mut ProgInfo, callee: &grit::Callee) {
match *callee {
grit::Callee::Combinator(ref fun_name) => {
info.used_funs.insert(fun_name.clone());
},
grit::Callee::KnownClosure(ref fun_name, ref val) => {
info.used_funs.insert(fun_name.clone());
collect_val(info, val);
},
grit::Callee::Unknown(ref val) =>
collect_val(info, val),
}
}<|fim▁end|>
|
collect_fun(&mut info, fun_def);
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>pub mod codegen;
pub mod jit;<|fim▁hole|><|fim▁end|>
|
pub mod cpuid;
|
<|file_name|>util.module.ts<|end_file_name|><|fim▁begin|>/// <reference path="sugar.ts" />
module vs.tools.util {
'use strict';
angular.module('vs.tools.util', [])<|fim▁hole|><|fim▁end|>
|
/* @ngInject */
.factory('sugar', (config, $http) => Sugar.getInstance(config, $http));
}
|
<|file_name|>ocr.py<|end_file_name|><|fim▁begin|>"""
Runs OCR on a given file.
"""
from os import system, listdir
from PIL import Image
from pytesseract import image_to_string
import editdistance
from constants import DATA_DIR
def classify(image, people_class, max_classify_distance=1, min_nonclassify_distance=3):
"""
Runs an OCR classifier on a given image file, matching the recognized text against the people_class dictionary.
"""
read = image_to_string(Image.open(image)).lower()
result = None
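# A name within max_classify_distance is a match; two matches, or a
# borderline near-miss, make the result ambiguous.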
for person in people_class:
dist = editdistance.eval(person, read)
if dist <= max_classify_distance:
if result is not None:
return None
result = people_class[person]
elif max_classify_distance < dist <= min_nonclassify_distance:
return None<|fim▁hole|> """
Extracts name regions from each PDF in the archive into images for OCR.
"""
system("unzip {} -d {}/extract".format(raw_data, DATA_DIR))
base = DATA_DIR + "/extract/"
mainfolder = base + listdir(base)[0]
files = sorted(listdir(mainfolder))
p_bar = progress(len(files))
for index, path in enumerate(files):
p_bar.update(index)
fullpath = mainfolder + "/" + path
system("mkdir {}/ocr".format(DATA_DIR))
basic_format = r"pdftoppm -png -f 3 -l 3 -x 170 -y %s -W 900 -H 100 {} > {}/ocr/%s{}.png" \
.format(fullpath, DATA_DIR, index)
system(basic_format % (1030, "left"))
system(basic_format % (1115, "right"))<|fim▁end|>
|
return result
def setup_ocr(raw_data, progress):
|
<|file_name|>generic-arg-mismatch-recover.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct Foo<'a, T: 'a>(&'a T);
struct Bar<'a>(&'a ());
fn main() {
Foo::<'static, 'static, ()>(&0); //~ ERROR wrong number of lifetime arguments
//~^ ERROR mismatched types
Bar::<'static, 'static, ()>(&()); //~ ERROR wrong number of lifetime arguments
//~^ ERROR wrong number of type arguments
}<|fim▁end|>
|
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
|
<|file_name|>24.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
export { BatteryFull24 as default } from "../../";
|
<|file_name|>clipboard.js<|end_file_name|><|fim▁begin|>/**
* Copyright JS Foundation and other contributors, http://js.foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
RED.clipboard = (function() {
var dialog;
var dialogContainer;
var exportNodesDialog;
var importNodesDialog;
var disabled = false;
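// Builds the shared import/export dialog and wires up its buttons.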
function setupDialogs() {
dialog = $('<div id="clipboard-dialog" class="hide node-red-dialog"><form class="dialog-form form-horizontal"></form></div>')
.appendTo("body")
.dialog({
modal: true,
autoOpen: false,<|fim▁hole|> width: 500,
resizable: false,
buttons: [
{
id: "clipboard-dialog-cancel",
text: RED._("common.label.cancel"),
click: function() {
$( this ).dialog( "close" );
}
},
{
id: "clipboard-dialog-close",
class: "primary",
text: RED._("common.label.close"),
click: function() {
$( this ).dialog( "close" );
}
},
{
id: "clipboard-dialog-copy",
class: "primary",
text: RED._("clipboard.export.copy"),
click: function() {
$("#clipboard-export").select();
document.execCommand("copy");
document.getSelection().removeAllRanges();
RED.notify(RED._("clipboard.nodesExported"));
$( this ).dialog( "close" );
}
},
{
id: "clipboard-dialog-ok",
class: "primary",
text: RED._("common.label.import"),
click: function() {
RED.view.importNodes($("#clipboard-import").val(),$("#import-tab > a.selected").attr('id') === 'import-tab-new');
$( this ).dialog( "close" );
}
}
],
open: function(e) {
$(this).parent().find(".ui-dialog-titlebar-close").hide();
},
close: function(e) {
}
});
dialogContainer = dialog.children(".dialog-form");
exportNodesDialog =
'<div class="form-row">'+
'<label style="width:auto;margin-right: 10px;" data-i18n="clipboard.export.copy"></label>'+
'<span id="export-range-group" class="button-group">'+
'<a id="export-range-selected" class="editor-button toggle" href="#" data-i18n="clipboard.export.selected"></a>'+
'<a id="export-range-flow" class="editor-button toggle" href="#" data-i18n="clipboard.export.current"></a>'+
'<a id="export-range-full" class="editor-button toggle" href="#" data-i18n="clipboard.export.all"></a>'+
'</span>'+
'</div>'+
'<div class="form-row">'+
'<textarea readonly style="resize: none; width: 100%; border-radius: 4px;font-family: monospace; font-size: 12px; background:#f3f3f3; padding-left: 0.5em; box-sizing:border-box;" id="clipboard-export" rows="5"></textarea>'+
'</div>'+
'<div class="form-row" style="text-align: right;">'+
'<span id="export-format-group" class="button-group">'+
'<a id="export-format-mini" class="editor-button editor-button-small toggle" href="#" data-i18n="clipboard.export.compact"></a>'+
'<a id="export-format-full" class="editor-button editor-button-small toggle" href="#" data-i18n="clipboard.export.formatted"></a>'+
'</span>'+
'</div>';
importNodesDialog = '<div class="form-row">'+
'<textarea style="resize: none; width: 100%; border-radius: 0px;font-family: monospace; font-size: 12px; background:#eee; padding-left: 0.5em; box-sizing:border-box;" id="clipboard-import" rows="5" placeholder="'+
RED._("clipboard.pasteNodes")+
'"></textarea>'+
'</div>'+
'<div class="form-row">'+
'<label style="width:auto;margin-right: 10px;" data-i18n="clipboard.import.import"></label>'+
'<span id="import-tab" class="button-group">'+
'<a id="import-tab-current" class="editor-button toggle selected" href="#" data-i18n="clipboard.export.current"></a>'+
'<a id="import-tab-new" class="editor-button toggle" href="#" data-i18n="clipboard.import.newFlow"></a>'+
'</span>'+
'</div>';
}
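// Enables the Import button only when the textarea holds parseable JSON.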
function validateImport() {
var importInput = $("#clipboard-import");
var v = importInput.val();
v = v.substring(v.indexOf('['),v.lastIndexOf(']')+1);
try {
JSON.parse(v);
importInput.removeClass("input-error");
importInput.val(v);
$("#clipboard-dialog-ok").button("enable");
} catch(err) {
if (v !== "") {
importInput.addClass("input-error");
}
$("#clipboard-dialog-ok").button("disable");
}
}
function importNodes() {
if (disabled) {
return;
}
dialogContainer.empty();
dialogContainer.append($(importNodesDialog));
dialogContainer.i18n();
$("#clipboard-dialog-ok").show();
$("#clipboard-dialog-cancel").show();
$("#clipboard-dialog-close").hide();
$("#clipboard-dialog-copy").hide();
$("#clipboard-dialog-ok").button("disable");
$("#clipboard-import").keyup(validateImport);
$("#clipboard-import").on('paste',function() { setTimeout(validateImport,10)});
$("#import-tab > a").click(function(evt) {
evt.preventDefault();
if ($(this).hasClass('disabled') || $(this).hasClass('selected')) {
return;
}
$(this).parent().children().removeClass('selected');
$(this).addClass('selected');
});
dialog.dialog("option","title",RED._("clipboard.importNodes")).dialog("open");
}
function exportNodes() {
if (disabled) {
return;
}
dialogContainer.empty();
dialogContainer.append($(exportNodesDialog));
dialogContainer.i18n();
$("#export-format-group > a").click(function(evt) {
evt.preventDefault();
if ($(this).hasClass('disabled') || $(this).hasClass('selected')) {
return;
}
$(this).parent().children().removeClass('selected');
$(this).addClass('selected');
var flow = $("#clipboard-export").val();
if (flow.length > 0) {
var nodes = JSON.parse(flow);
var format = $(this).attr('id');
if (format === 'export-format-full') {
flow = JSON.stringify(nodes,null,4);
} else {
flow = JSON.stringify(nodes);
}
$("#clipboard-export").val(flow);
}
});
$("#export-range-group > a").click(function(evt) {
evt.preventDefault();
if ($(this).hasClass('disabled') || $(this).hasClass('selected')) {
return;
}
$(this).parent().children().removeClass('selected');
$(this).addClass('selected');
var type = $(this).attr('id');
var flow = "";
var nodes = null;
if (type === 'export-range-selected') {
var selection = RED.view.selection();
nodes = RED.nodes.createExportableNodeSet(selection.nodes);
} else if (type === 'export-range-flow') {
var activeWorkspace = RED.workspaces.active();
nodes = RED.nodes.filterNodes({z:activeWorkspace});
var parentNode = RED.nodes.workspace(activeWorkspace)||RED.nodes.subflow(activeWorkspace);
nodes.unshift(parentNode);
nodes = RED.nodes.createExportableNodeSet(nodes);
} else if (type === 'export-range-full') {
nodes = RED.nodes.createCompleteNodeSet(false);
}
if (nodes !== null) {
if (RED.settings.flowFilePretty) {
flow = JSON.stringify(nodes,null,4);
} else {
flow = JSON.stringify(nodes);
}
}
if (flow.length > 0) {
$("#export-copy").removeClass('disabled');
} else {
$("#export-copy").addClass('disabled');
}
$("#clipboard-export").val(flow);
})
$("#clipboard-dialog-ok").hide();
$("#clipboard-dialog-cancel").hide();
$("#clipboard-dialog-copy").hide();
$("#clipboard-dialog-close").hide();
var selection = RED.view.selection();
if (selection.nodes) {
$("#export-range-selected").click();
} else {
$("#export-range-selected").addClass('disabled').removeClass('selected');
$("#export-range-flow").click();
}
if (RED.settings.flowFilePretty) {
$("#export-format-full").click();
} else {
$("#export-format-mini").click();
}
$("#clipboard-export")
.focus(function() {
var textarea = $(this);
textarea.select();
textarea.mouseup(function() {
textarea.unbind("mouseup");
return false;
})
});
dialog.dialog("option","title",RED._("clipboard.exportNodes")).dialog( "open" );
setTimeout(function() {
$("#clipboard-export").focus();
if (!document.queryCommandEnabled("copy")) {
$("#clipboard-dialog-cancel").hide();
$("#clipboard-dialog-close").show();
} else {
$("#clipboard-dialog-cancel").show();
$("#clipboard-dialog-copy").show();
}
},0);
}
function hideDropTarget() {
$("#dropTarget").hide();
RED.keyboard.remove("escape");
}
return {
init: function() {
setupDialogs();
RED.events.on("view:selection-changed",function(selection) {
if (!selection.nodes) {
RED.menu.setDisabled("menu-item-export",true);
RED.menu.setDisabled("menu-item-export-clipboard",true);
RED.menu.setDisabled("menu-item-export-library",true);
} else {
RED.menu.setDisabled("menu-item-export",false);
RED.menu.setDisabled("menu-item-export-clipboard",false);
RED.menu.setDisabled("menu-item-export-library",false);
}
});
RED.actions.add("core:show-export-dialog",exportNodes);
RED.actions.add("core:show-import-dialog",importNodes);
RED.events.on("editor:open",function() { disabled = true; });
RED.events.on("editor:close",function() { disabled = false; });
RED.events.on("search:open",function() { disabled = true; });
RED.events.on("search:close",function() { disabled = false; });
RED.events.on("type-search:open",function() { disabled = true; });
RED.events.on("type-search:close",function() { disabled = false; });
RED.events.on("palette-editor:open",function() { disabled = true; });
RED.events.on("palette-editor:close",function() { disabled = false; });
$('#chart').on("dragenter",function(event) {
if ($.inArray("text/plain",event.originalEvent.dataTransfer.types) != -1) {
$("#dropTarget").css({display:'table'});
RED.keyboard.add("*", "escape" ,hideDropTarget);
}
});
$('#dropTarget').on("dragover",function(event) {
if ($.inArray("text/plain",event.originalEvent.dataTransfer.types) != -1) {
event.preventDefault();
}
})
.on("dragleave",function(event) {
hideDropTarget();
})
.on("drop",function(event) {
var data = event.originalEvent.dataTransfer.getData("text/plain");
hideDropTarget();
data = data.substring(data.indexOf('['),data.lastIndexOf(']')+1);
RED.view.importNodes(data);
event.preventDefault();
});
},
import: importNodes,
export: exportNodes
}
})();<|fim▁end|>
| |
<|file_name|>scripts.js<|end_file_name|><|fim▁begin|>function infostart_rate(ob_id, vote)
{
$.get("/bitrix/components/infostart/objects_new.list/ajax_rate.php",
{ID:ob_id, TYPE:vote},
function(answer)
<|fim▁hole|> {
alert(answer);
});
}<|fim▁end|>
| |
<|file_name|>comp-1489.component.ts<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> *
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-comp-1489',
templateUrl: './comp-1489.component.html',
styleUrls: ['./comp-1489.component.css']
})
export class Comp1489Component implements OnInit {
constructor() { }
ngOnInit() {
}
}<|fim▁end|>
|
* @license
* Copyright Google Inc. All Rights Reserved.
|
<|file_name|>feed_parse_extractMiratlsWordpressCom.py<|end_file_name|><|fim▁begin|>def extractMiratlsWordpressCom(item):<|fim▁hole|> Parser for 'miratls.wordpress.com'
'''
vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
if not (chp or vol) or "preview" in item['title'].lower():
return None
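# Map source tags to release names and translation types.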
tagmap = [
('PRC', 'PRC', 'translated'),
('Loiterous', 'Loiterous', 'oel'),
]
for tagname, name, tl_type in tagmap:
if tagname in item['tags']:
return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
return False<|fim▁end|>
|
'''
|
<|file_name|>mapSVC.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module("services")
.factory('mapSVC', [ "colorsSVC", function(colorsSVC) {<|fim▁hole|> var setPathMode = function (mode) {
if(mode == "driving"){
return google.maps.DirectionsTravelMode.DRIVING;
}
else if(mode == "walking"){
return google.maps.DirectionsTravelMode.WALKING;
}
else if(mode == "transit"){
return google.maps.DirectionsTravelMode.TRANSIT;
}
else if(mode == "bicycling"){
return google.maps.DirectionsTravelMode.BICYCLING;
}
}
// Sets the colors of the chart, from RGB data to Hex
var setColors = function (colors) {
var util = [];
for(var i=0;i<colors.length;i++){
var rgb = colors[i];
var color = colorsSVC.rgbToHex(rgb.red, rgb.green, rgb.blue);
util.push(color);
}
return util;
}
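// Draws a colored polyline for the given path on the map.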
var createPolyline = function (pathLine, color, map) {
return new google.maps.Polyline({ path: pathLine
, strokeColor: color
, strokeOpacity: 1.0
, strokeWeight: 3
, map: map
, visible: true
});
};
var createMarker = function (point, map) {
var marker = new MarkerWithLabel({ position: new google.maps.LatLng(point.latitude, point.longitude)
, map: map
, title: point.id.toString()
, icon: { url: 'aps/res/markerBI_24.png'
, size: new google.maps.Size(24,24)
, anchor: new google.maps.Point(12,12)
}
, zIndex: 1
, labelContent: point.id < 100 ? (point.id < 10 ? "T0" + point.id.toString() : "T" + point.id.toString()) : point.id.toString()
, labelAnchor: new google.maps.Point(8, 7)
, labelClass: "mrkLa"
, labelZIndex: 2
});
return marker;
}
var buildPath = function(path, color, map, polylines, method) {
var pathline = [];
var service = new google.maps.DirectionsService();
for(var i=0; i<path.length-1; i++) {
service.route({ origin: cnvLatLong(path[i]) // Consumes a point from the path
, destination: cnvLatLong(path[i+1]) // Recursively calls itself for the next points
, travelMode: setPathMode(method)
}
, function(result, status) { // Async Callback, gets the response from Google Maps Directions
if(status == google.maps.DirectionsStatus.OK) {
var path = result.routes[0].overview_path;
var legs = result.routes[0].legs;
for (var i=0;i<legs.length;i++) { // Parses the subroutes between two points
var steps = legs[i].steps;
for (var j=0;j<steps.length;j++) {
var nextSegment = steps[j].path;
for (var k=0;k<nextSegment.length;k++) { // Pushes the segment on the path
pathline.push(nextSegment[k]);
}
}
}
// Generates the Polyline of the calculated path
polylines.push(createPolyline(pathline,color,map));
pathline = [];
}
});
}
};
var resetMap = function (map, polylines, markers, id, position) {
for(var line in polylines){
polylines[line].setMap(null);
}
for(var marker in markers){
markers[marker].setMap(null);
}
};
return { createPolyline : createPolyline
, buildPath : buildPath
, setColors : setColors
, buildLegend : function (map, position, id) {
var legend = document.getElementById('mapLegend');
if (position == "top-right"){
if(map.controls[google.maps.ControlPosition.TOP_RIGHT].length > 0){
map.controls[google.maps.ControlPosition.TOP_RIGHT].pop();
}
map.controls[google.maps.ControlPosition.TOP_RIGHT].push(legend);
}
if (position == "top-left"){
if(map.controls[google.maps.ControlPosition.TOP_LEFT].length > 0){
map.controls[google.maps.ControlPosition.TOP_LEFT].pop();
}
map.controls[google.maps.ControlPosition.TOP_LEFT].push(legend);
}
if (position == "bottom-right"){
if(map.controls[google.maps.ControlPosition.BOTTOM_RIGHT].length > 0){
map.controls[google.maps.ControlPosition.BOTTOM_RIGHT].pop();
}
map.controls[google.maps.ControlPosition.BOTTOM_RIGHT].push(legend);
}
if (position == "bottom-left"){
if(map.controls[google.maps.ControlPosition.BOTTOM_LEFT].length > 0){
map.controls[google.maps.ControlPosition.BOTTOM_LEFT].pop();
}
map.controls[google.maps.ControlPosition.BOTTOM_LEFT].push(legend);
}
}
, createMarker: createMarker
, updateMovie : function (markers, newData, map) {
for(var marker in markers) {
var toBeRemoved = true; // It's always pending removal, except when it has to be updated!
for(var i=0;i<newData.newPoints.length && toBeRemoved == true;i++) {
// Then update it!
if(markers[marker].title == newData.newPoints[i].id.toString() || markers[marker].title == newData.newPoints[i].id){
markers[marker].setPosition(new google.maps.LatLng(newData.newPoints[i].latitude, newData.newPoints[i].longitude));
newData.newPoints.splice(i,1);
toBeRemoved = false;
}
}
if(toBeRemoved) {
// I guess its time has come
markers[marker].setMap(null);
delete markers[marker];
}
}
// A new life is born!
for(var obj in newData.newPoints) {
markers[newData.newPoints[obj].id] = createMarker(newData.newPoints[obj], map);
}
}
, resetMap: resetMap
}
}]);<|fim▁end|>
|
// Converts two coordinates to a GoogleLatLong
var cnvLatLong = function (x) {
return new google.maps.LatLng(x[0],x[1]);
};
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Testing.
See the [Testing](https://tensorflow.org/api_docs/python/tf/test) guide.
Note: `tf.compat.v1.test.mock` is an alias to the python `mock` or
`unittest.mock` depending on the python version.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=g-bad-import-order
from tensorflow.python.framework import test_util as _test_util<|fim▁hole|>
# pylint: disable=unused-import
from tensorflow.python.framework.test_util import assert_equal_graph_def
from tensorflow.python.framework.test_util import create_local_cluster
from tensorflow.python.framework.test_util import TensorFlowTestCase as TestCase
from tensorflow.python.framework.test_util import gpu_device_name
from tensorflow.python.framework.test_util import is_gpu_available
from tensorflow.python.ops.gradient_checker import compute_gradient_error
from tensorflow.python.ops.gradient_checker import compute_gradient
# pylint: enable=unused-import,g-bad-import-order
import sys
from tensorflow.python.util.tf_export import tf_export
if sys.version_info.major == 2:
import mock # pylint: disable=g-import-not-at-top,unused-import
else:
from unittest import mock # pylint: disable=g-import-not-at-top,g-importing-member
tf_export(v1=['test.mock'])(mock)
# Import Benchmark class
Benchmark = _googletest.Benchmark # pylint: disable=invalid-name
# Import StubOutForTesting class
StubOutForTesting = _googletest.StubOutForTesting # pylint: disable=invalid-name
@tf_export('test.main')
def main(argv=None):
"""Runs all unit tests."""
_test_util.InstallStackTraceHandler()
return _googletest.main(argv)
@tf_export(v1=['test.get_temp_dir'])
def get_temp_dir():
"""Returns a temporary directory for use during tests.
There is no need to delete the directory after the test.
Returns:
The temporary directory.
"""
return _googletest.GetTempDir()
@tf_export(v1=['test.test_src_dir_path'])
def test_src_dir_path(relative_path):
"""Creates an absolute test srcdir path given a relative path.
Args:
relative_path: a path relative to tensorflow root.
e.g. "core/platform".
Returns:
An absolute path to the linked in runfiles.
"""
return _googletest.test_src_dir_path(relative_path)
@tf_export('test.is_built_with_cuda')
def is_built_with_cuda():
"""Returns whether TensorFlow was built with CUDA (GPU) support."""
return _test_util.IsGoogleCudaEnabled()
@tf_export('test.is_built_with_rocm')
def is_built_with_rocm():
"""Returns whether TensorFlow was built with ROCm (GPU) support."""
return _test_util.IsBuiltWithROCm()
@tf_export('test.is_built_with_gpu_support')
def is_built_with_gpu_support():
"""Returns whether TensorFlow was built with GPU (i.e. CUDA or ROCm) support."""
return is_built_with_cuda() or is_built_with_rocm()
@tf_export('test.is_built_with_xla')
def is_built_with_xla():
"""Returns whether TensorFlow was built with XLA support."""
return _test_util.IsBuiltWithXLA()<|fim▁end|>
|
from tensorflow.python.platform import googletest as _googletest
|
<|file_name|>constants.py<|end_file_name|><|fim▁begin|>"""
Constants used across the ORM in general.
"""
# Separator used to split filter strings apart.<|fim▁hole|>LOOKUP_SEP = '__'<|fim▁end|>
| |
<|file_name|>delay_based_time_source_unittest.cc<|end_file_name|><|fim▁begin|>// Copyright 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "components/viz/common/frame_sinks/delay_based_time_source.h"
#include <stdint.h>
#include "base/test/simple_test_tick_clock.h"
#include "base/test/test_simple_task_runner.h"
#include "components/viz/test/fake_delay_based_time_source.h"
#include "testing/gtest/include/gtest/gtest.h"
namespace viz {
namespace {
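// One frame interval at 60Hz.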
base::TimeDelta Interval() {
return base::Microseconds(base::Time::kMicrosecondsPerSecond / 60);
}
class DelayBasedTimeSourceTest : public ::testing::Test {
protected:
void SetUp() override {
now_src_ = std::make_unique<base::SimpleTestTickClock>();
task_runner_ = base::MakeRefCounted<base::TestSimpleTaskRunner>();
delay_based_time_source_ = std::make_unique<FakeDelayBasedTimeSource>(
now_src_.get(), task_runner_.get());
delay_based_time_source_->SetClient(&client_);
}
void TearDown() override {
delay_based_time_source_.reset();
task_runner_ = nullptr;
now_src_.reset();
}
void SetNow(base::TimeTicks ticks) { now_src_->SetNowTicks(ticks); }
base::TestSimpleTaskRunner* task_runner() { return task_runner_.get(); }
FakeDelayBasedTimeSource* timer() { return delay_based_time_source_.get(); }
FakeDelayBasedTimeSourceClient* client() { return &client_; }
std::unique_ptr<base::SimpleTestTickClock> now_src_;
FakeDelayBasedTimeSourceClient client_;
scoped_refptr<base::TestSimpleTaskRunner> task_runner_;
std::unique_ptr<FakeDelayBasedTimeSource> delay_based_time_source_;
};
TEST_F(DelayBasedTimeSourceTest, TaskPostedAndTickCalled) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
EXPECT_TRUE(timer()->Active());
EXPECT_TRUE(task_runner()->HasPendingTask());
SetNow(timer()->Now() + base::Milliseconds(16));
task_runner()->RunPendingTasks();
EXPECT_TRUE(timer()->Active());
EXPECT_TRUE(client()->TickCalled());
}
TEST_F(DelayBasedTimeSourceTest, TickNotCalledWithTaskPosted) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
EXPECT_TRUE(task_runner()->HasPendingTask());
timer()->SetActive(false);
task_runner()->RunPendingTasks();
EXPECT_FALSE(client()->TickCalled());
}
TEST_F(DelayBasedTimeSourceTest, StartTwiceEnqueuesOneTask) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
EXPECT_TRUE(task_runner()->HasPendingTask());
task_runner()->ClearPendingTasks();
timer()->SetActive(true);
EXPECT_FALSE(task_runner()->HasPendingTask());
}
TEST_F(DelayBasedTimeSourceTest, StartWhenRunningDoesntTick) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
EXPECT_TRUE(task_runner()->HasPendingTask());
task_runner()->RunPendingTasks();
task_runner()->ClearPendingTasks();
timer()->SetActive(true);
EXPECT_FALSE(task_runner()->HasPendingTask());
}
// At 60Hz, when the tick returns at exactly the requested next time, make sure
// a 16ms next delay is posted.
TEST_F(DelayBasedTimeSourceTest, NextDelaySaneWhenExactlyOnRequestedTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
SetNow(timer()->Now() + Interval());
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
// At 60Hz, when the tick returns at slightly after the requested next time,
// make sure a 16ms next delay is posted.
TEST_F(DelayBasedTimeSourceTest, NextDelaySaneWhenSlightlyAfterRequestedTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
SetNow(timer()->Now() + Interval() + base::Microseconds(1));
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
// At 60Hz, when the tick returns at exactly 2*interval after the requested next
// time, make sure we don't tick unnecessarily.
TEST_F(DelayBasedTimeSourceTest,
NextDelaySaneWhenExactlyTwiceAfterRequestedTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
SetNow(timer()->Now() + 2 * Interval());
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
// At 60Hz, when the tick returns at 2*interval and a bit after the requested
// next time, make sure a 16ms next delay is posted.
TEST_F(DelayBasedTimeSourceTest,
NextDelaySaneWhenSlightlyAfterTwiceRequestedTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
SetNow(timer()->Now() + 2 * Interval() + base::Microseconds(1));
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
// At 60Hz, when the tick returns halfway to the next frame time, make sure
// a correct next delay value is posted.
TEST_F(DelayBasedTimeSourceTest, NextDelaySaneWhenHalfAfterRequestedTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
SetNow(timer()->Now() + Interval() + base::Milliseconds(8));
task_runner()->RunPendingTasks();
EXPECT_EQ(8, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
TEST_F(DelayBasedTimeSourceTest, JitteryRuntimeWithFutureTimebases) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
// Run the first tick.
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
base::TimeTicks future_timebase = timer()->Now() + Interval() * 10;
// 1ms jitter
base::TimeDelta jitter1 = base::Milliseconds(1);
// Tick with +1ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter1);
task_runner()->RunPendingTasks();
EXPECT_EQ(15, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter1);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
<|fim▁hole|> future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter1);
task_runner()->RunPendingTasks();
EXPECT_EQ(1, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter1);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
// 8 ms jitter
base::TimeDelta jitter8 = base::Milliseconds(8);
// Tick with +8ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter8);
task_runner()->RunPendingTasks();
EXPECT_EQ(8, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter8);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with -8ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter8);
task_runner()->RunPendingTasks();
EXPECT_EQ(8, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter8);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
// 15 ms jitter
base::TimeDelta jitter15 = base::Milliseconds(15);
// Tick with +15ms jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter15);
task_runner()->RunPendingTasks();
EXPECT_EQ(1, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter15);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with -15ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() - jitter15);
task_runner()->RunPendingTasks();
EXPECT_EQ(15, task_runner()->NextPendingTaskDelay().InMilliseconds());
// Tick with 0ms of jitter
future_timebase += Interval();
timer()->SetTimebaseAndInterval(future_timebase, Interval());
SetNow(timer()->Now() + Interval() + jitter15);
task_runner()->RunPendingTasks();
EXPECT_EQ(16, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
TEST_F(DelayBasedTimeSourceTest, AchievesTargetRateWithNoNoise) {
int num_iterations = 10;
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true);
double total_frame_time = 0.0;
for (int i = 0; i < num_iterations; ++i) {
int64_t delay_ms = task_runner()->NextPendingTaskDelay().InMilliseconds();
// accumulate the "delay"
total_frame_time += delay_ms / 1000.0;
// Run the callback exactly when asked
SetNow(timer()->Now() + base::Milliseconds(delay_ms));
task_runner()->RunPendingTasks();
}
double average_interval =
total_frame_time / static_cast<double>(num_iterations);
EXPECT_NEAR(1.0 / 60.0, average_interval, 0.1);
}
TEST_F(DelayBasedTimeSourceTest, TestDeactivateWhilePending) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
timer()->SetActive(true); // Should post a task.
timer()->SetActive(false);
// Should run the posted task without crashing.
EXPECT_TRUE(task_runner()->HasPendingTask());
task_runner()->RunPendingTasks();
}
TEST_F(DelayBasedTimeSourceTest,
TestDeactivateAndReactivateBeforeNextTickTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
// Should run the activate task, and pick up a new timebase.
timer()->SetActive(true);
task_runner()->RunPendingTasks();
// Stop the timer()
timer()->SetActive(false);
// Task will be pending anyway, run it
task_runner()->RunPendingTasks();
// Start the timer() again, but before the next tick time the timer()
// previously planned on using. That same tick time should still be targeted.
SetNow(timer()->Now() + base::Milliseconds(4));
timer()->SetActive(true);
EXPECT_EQ(12, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
TEST_F(DelayBasedTimeSourceTest, TestDeactivateAndReactivateAfterNextTickTime) {
timer()->SetTimebaseAndInterval(base::TimeTicks(), Interval());
// Should run the activate task, and pick up a new timebase.
timer()->SetActive(true);
task_runner()->RunPendingTasks();
// Stop the timer().
timer()->SetActive(false);
// Task will be pending anyway, run it.
task_runner()->RunPendingTasks();
// Start the timer() again, but after the next tick time the timer()
// previously planned on using. That tick time should be skipped and the
// one after it targeted.
SetNow(timer()->Now() + base::Milliseconds(20));
timer()->SetActive(true);
EXPECT_EQ(13, task_runner()->NextPendingTaskDelay().InMilliseconds());
}
} // namespace
} // namespace viz<|fim▁end|>
|
// Tick with -1ms of jitter
|
<|file_name|>teleop.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
PACKAGE_NAME = 'shiba_teleop'
import roslib
roslib.load_manifest(PACKAGE_NAME)
import rospy
from geometry_msgs.msg import Twist
from sensor_msgs.msg import Joy
import rospkg
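# Motion states for the robot.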
FORWARD = 1
BACKWARDS = 2
SPINNING = 3
STOPPED = 4
linear_increment = 0.3
max_linear_vel = 1.0<|fim▁hole|>
angular_increment = 0.1
max_angular_vel = 0.6
spin_speed = 1.0
last_joy_message = None
linear_vel = 0.0
angular_vel = 0.0
last_angular_acceleration = 0
rotating = False
state = STOPPED<|fim▁end|>
|
min_linear_vel = -1.0
default_linear_vel = 0.1
|
<|file_name|>utils.d.ts<|end_file_name|><|fim▁begin|>export function makeRequest(method: any, auth: any, url: any, qs: any, data: any, cb: any): Promise<any>;
/**
* Freshdesk's API protocol violations
*
* @param {String} message Error message
* @param {Number} status HTTP status of the received Freshdesk-response. Could be useful for debugging<|fim▁hole|> */
export class FreshdeskError extends Error {
constructor(message: any, data: any, res: any);
data: any;
status: any;
apiTarget: string;
requestId: any;
}
/**
* Checks if value is null or undefined.
*
* @private
*
* @param {*} value The value to check.
* @return {boolean} Returns `true` if value is `null` or `undefined`; else `false`.
*
*/
export function isNil(value: any): boolean;
/**
* Checks if value is classified as a Function object.
*
* @private
*
* @param {*} value The value to check.
* @return {boolean} Returns `true` if value is a `function`; else `false`.
*/
export function isFunction(value: any): boolean;
export function createResponseHandler(cb: any): (error: any, response: any, body: any) => any;<|fim▁end|>
|
* @param {Object} data Parsed response of the Freshdesk API
|
<|file_name|>dantooine_voritor_hunter_lair_1.py<|end_file_name|><|fim▁begin|>import sys
from java.util import Vector
def addTemplate(core):
core.spawnService.addLairTemplate('dantooine_voritor_hunter_lair_2', 'slinking_voritor_hunter', 15, 'object/tangible/lair/base/poi_all_lair_bones.iff')<|fim▁hole|><|fim▁end|>
|
return
|
<|file_name|>repository.go<|end_file_name|><|fim▁begin|>package yum
import (
"bytes"
"encoding/xml"
"fmt"
"io"
"io/ioutil"
"math"
"os"
"path/filepath"
"time"
"github.com/gonuts/logger"
)
// List of packages to ignore for our case
var IGNORED_PACKAGES = []string{
"rpmlib(CompressedFileNames)", "/bin/sh", "rpmlib(PayloadFilesHavePrefix)",
"rpmlib(PartialHardlinkSets)",
}
// Repository represents a YUM repository with all associated metadata.
type Repository struct {
msg *logger.Logger
Name string
RepoUrl string
RepoMdUrl string
LocalRepoMdXml string
CacheDir string
Backends []string
Backend Backend
}
// NewRepository creates a new Repository with the given name and url.
func NewRepository(name, url, cachedir string, backends []string, setupBackend, checkForUpdates bool) (*Repository, error) {
repo := Repository{
msg: logger.NewLogger("repo", logger.INFO, os.Stdout),
Name: name,
RepoUrl: url,
RepoMdUrl: url + "/repodata/repomd.xml",
LocalRepoMdXml: filepath.Join(cachedir, "repomd.xml"),
CacheDir: cachedir,
Backends: make([]string, len(backends)),
}
copy(repo.Backends, backends)
err := os.MkdirAll(cachedir, 0644)
if err != nil {
return nil, err
}
// load appropriate backend if requested
if setupBackend {
if checkForUpdates {
err = repo.setupBackendFromRemote()
if err != nil {
return nil, err
}
} else {
err = repo.setupBackendFromLocal()
if err != nil {
return nil, err
}
}
}
return &repo, err
}
// Close cleans up after use
func (repo *Repository) Close() error {
return repo.Backend.Close()
}
// FindLatestMatchingName locates a package by name and returns the latest available version.
func (repo *Repository) FindLatestMatchingName(name, version, release string) (*Package, error) {
return repo.Backend.FindLatestMatchingName(name, version, release)
}
// FindLatestMatchingRequire locates a package providing a given functionality.
func (repo *Repository) FindLatestMatchingRequire(requirement *Requires) (*Package, error) {
return repo.Backend.FindLatestMatchingRequire(requirement)
}
// GetPackages returns all the packages known by a YUM repository
func (repo *Repository) GetPackages() []*Package {
return repo.Backend.GetPackages()
}
// setupBackendFromRemote checks which backend should be used and updates the DB files.
func (repo *Repository) setupBackendFromRemote() error {
repo.msg.Debugf("setupBackendFromRemote...\n")
var err error
var backend Backend
<|fim▁hole|> }
remotemd, err := repo.checkRepoMD(remotedata)
if err != nil {
return err
}
localdata, err := repo.localMetadata()
if err != nil {
return err
}
localmd, err := repo.checkRepoMD(localdata)
if err != nil {
return err
}
for _, bname := range repo.Backends {
repo.msg.Debugf("checking availability of backend [%s]\n", bname)
ba, err := NewBackend(bname, repo)
if err != nil {
ba.Close()
continue
}
rrepomd, ok := remotemd[ba.YumDataType()]
if !ok {
repo.msg.Warnf("remote repository does not provide [%s] DB\n", bname)
continue
}
// a priori a match
backend = ba
repo.Backend = backend
lrepomd, ok := localmd[ba.YumDataType()]
if !ok {
// doesn't matter, we download the DB in any case
}
if !repo.Backend.HasDB() || rrepomd.Timestamp.After(lrepomd.Timestamp) {
// we need to update the DB
url := repo.RepoUrl + "/" + rrepomd.Location
repo.msg.Debugf("updating the RPM database for %s\n", bname)
err = repo.Backend.GetLatestDB(url)
if err != nil {
repo.msg.Warnf("problem updating RPM database for backend [%s]: %v\n", bname, err)
err = nil
backend = nil
repo.Backend = nil
continue
}
// save metadata to local repomd file
err = ioutil.WriteFile(repo.LocalRepoMdXml, remotedata, 0644)
if err != nil {
repo.msg.Warnf("problem updating local repomd.xml file for backend [%s]: %v\n", bname, err)
err = nil
backend = nil
repo.Backend = nil
continue
}
}
// load data necessary for the backend
err = repo.Backend.LoadDB()
if err != nil {
repo.msg.Warnf("problem loading data for backend [%s]: %v\n", bname, err)
err = nil
backend = nil
repo.Backend = nil
continue
}
// stop at first one found
break
}
if backend == nil {
repo.msg.Errorf("No valid backend found\n")
return fmt.Errorf("No valid backend found")
}
repo.msg.Debugf("repository [%s] - chosen backend [%T]\n", repo.Name, repo.Backend)
return err
}
func (repo *Repository) setupBackendFromLocal() error {
repo.msg.Debugf("setupBackendFromLocal...\n")
var err error
data, err := repo.localMetadata()
if err != nil {
return err
}
md, err := repo.checkRepoMD(data)
if err != nil {
return err
}
var backend Backend
for _, bname := range repo.Backends {
repo.msg.Debugf("checking availability of backend [%s]\n", bname)
ba, err := NewBackend(bname, repo)
if err != nil {
continue
}
_ /*repomd*/, ok := md[ba.YumDataType()]
if !ok {
repo.msg.Warnf("local repository does not provide [%s] DB\n", bname)
continue
}
// a priori a match
backend = ba
repo.Backend = backend
// loading data necessary for the backend
err = repo.Backend.LoadDB()
if err != nil {
repo.msg.Warnf("problem loading data for backend [%s]: %v\n", bname, err)
err = nil
backend = nil
repo.Backend = nil
continue
}
// stop at first one found.
break
}
if backend == nil {
repo.msg.Errorf("No valid backend found\n")
return fmt.Errorf("No valid backend found")
}
repo.msg.Debugf("repository [%s] - chosen backend [%T]\n", repo.Name, repo.Backend)
return err
}
// remoteMetadata retrieves the repo metadata file content
func (repo *Repository) remoteMetadata() ([]byte, error) {
r, err := getRemoteData(repo.RepoMdUrl)
if err != nil {
return nil, err
}
defer r.Close()
buf := new(bytes.Buffer)
_, err = io.Copy(buf, r)
if err != nil && err != io.EOF {
return nil, err
}
return buf.Bytes(), err
}
// localMetadata retrieves the repo metadata from the repomd file
func (repo *Repository) localMetadata() ([]byte, error) {
if !path_exists(repo.LocalRepoMdXml) {
return nil, nil
}
f, err := os.Open(repo.LocalRepoMdXml)
if err != nil {
return nil, err
}
defer f.Close()
buf := new(bytes.Buffer)
_, err = io.Copy(buf, f)
if err != nil && err != io.EOF {
return nil, err
}
return buf.Bytes(), err
}
// checkRepoMD parses the Repository metadata XML content
func (repo *Repository) checkRepoMD(data []byte) (map[string]RepoMD, error) {
if len(data) <= 0 {
repo.msg.Debugf("checkRepoMD: no data\n")
return nil, nil
}
type xmlTree struct {
XMLName xml.Name `xml:"repomd"`
Data []struct {
Type string `xml:"type,attr"`
Checksum string `xml:"checksum"`
Location struct {
Href string `xml:"href,attr"`
} `xml:"location"`
Timestamp float64 `xml:"timestamp"`
} `xml:"data"`
}
var tree xmlTree
err := xml.Unmarshal(data, &tree)
if err != nil {
return nil, err
}
db := make(map[string]RepoMD)
for _, data := range tree.Data {
sec := int64(math.Floor(data.Timestamp))
nsec := int64((data.Timestamp - float64(sec)) * 1e9)
db[data.Type] = RepoMD{
Checksum: data.Checksum,
Timestamp: time.Unix(sec, nsec),
Location: data.Location.Href,
}
}
return db, err
}
type RepoMD struct {
Checksum string
Timestamp time.Time
Location string
}
// EOF<|fim▁end|>
|
// get repo metadata with list of available files
remotedata, err := repo.remoteMetadata()
if err != nil {
return err
|
<|file_name|>input.rs<|end_file_name|><|fim▁begin|>#[allow(dead_code)]
pub static EXAMPLE_INPUT: &'static str =
"swap position 4 with position 0
swap letter d with letter b
reverse positions 0 through 4
rotate left 1 step
move position 1 to position 4<|fim▁hole|>rotate based on position of letter d";
#[allow(dead_code)]
pub static PUZZLE_INPUT: &'static str = "rotate right 3 steps
swap letter b with letter a
move position 3 to position 4
swap position 0 with position 7
swap letter f with letter h
rotate based on position of letter f
rotate based on position of letter b
swap position 3 with position 0
swap position 6 with position 1
move position 4 to position 0
rotate based on position of letter d
swap letter d with letter h
reverse positions 5 through 6
rotate based on position of letter h
reverse positions 4 through 5
move position 3 to position 6
rotate based on position of letter e
rotate based on position of letter c
rotate right 2 steps
reverse positions 5 through 6
rotate right 3 steps
rotate based on position of letter b
rotate right 5 steps
swap position 5 with position 6
move position 6 to position 4
rotate left 0 steps
swap position 3 with position 5
move position 4 to position 7
reverse positions 0 through 7
rotate left 4 steps
rotate based on position of letter d
rotate left 3 steps
swap position 0 with position 7
rotate based on position of letter e
swap letter e with letter a
rotate based on position of letter c
swap position 3 with position 2
rotate based on position of letter d
reverse positions 2 through 4
rotate based on position of letter g
move position 3 to position 0
move position 3 to position 5
swap letter b with letter d
reverse positions 1 through 5
reverse positions 0 through 1
rotate based on position of letter a
reverse positions 2 through 5
swap position 1 with position 6
swap letter f with letter e
swap position 5 with position 1
rotate based on position of letter a
move position 1 to position 6
swap letter e with letter d
reverse positions 4 through 7
swap position 7 with position 5
swap letter c with letter g
swap letter e with letter g
rotate left 4 steps
swap letter c with letter a
rotate left 0 steps
swap position 0 with position 1
reverse positions 1 through 4
rotate based on position of letter d
swap position 4 with position 2
rotate right 0 steps
swap position 1 with position 0
swap letter c with letter a
swap position 7 with position 3
swap letter a with letter f
reverse positions 3 through 7
rotate right 1 step
swap letter h with letter c
move position 1 to position 3
swap position 4 with position 2
rotate based on position of letter b
reverse positions 5 through 6
move position 5 to position 3
swap letter b with letter g
rotate right 6 steps
reverse positions 6 through 7
swap position 2 with position 5
rotate based on position of letter e
swap position 1 with position 7
swap position 1 with position 5
reverse positions 2 through 7
reverse positions 5 through 7
rotate left 3 steps
rotate based on position of letter b
rotate left 3 steps
swap letter e with letter c
rotate based on position of letter a
swap letter f with letter a
swap position 0 with position 6
swap position 4 with position 7
reverse positions 0 through 5
reverse positions 3 through 5
swap letter d with letter e
move position 0 to position 7
move position 1 to position 3
reverse positions 4 through 7";<|fim▁end|>
|
move position 3 to position 0
rotate based on position of letter b
|
<|file_name|>test_ofstats.py<|end_file_name|><|fim▁begin|># Copyright 2011 Midokura KK
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unittests for ofstats.py.
"""
import struct
import unittest2
from openfaucet import buffer
from openfaucet import ofaction
from openfaucet import ofmatch
from openfaucet import ofstats
class TestDescriptionStats(unittest2.TestCase):
def setUp(self):
self.buf = buffer.ReceiveBuffer()
self.mfr_desc = 'Dummy Manufacturer Inc.'
self.hw_desc = 'DummySwitch'
self.sw_desc = 'DummyOS'
self.serial_num = '0000000042'
self.dp_desc = 'unittest switch'
self.desc_stats = ofstats.DescriptionStats(
mfr_desc=self.mfr_desc,
hw_desc=self.hw_desc,
sw_desc=self.sw_desc,
serial_num=self.serial_num,
dp_desc=self.dp_desc)
def test_serialize(self):
self.assertEqual(
self.mfr_desc + '\x00' * (256 - len(self.mfr_desc))
+ self.hw_desc + '\x00' * (256 - len(self.hw_desc))
+ self.sw_desc + '\x00' * (256 - len(self.sw_desc))
+ self.serial_num + '\x00' * (32 - len(self.serial_num))
+ self.dp_desc + '\x00' * (256 - len(self.dp_desc)),
self.desc_stats.serialize())
def test_deserialize(self):
self.buf.append(self.mfr_desc + '\x00' * (256 - len(self.mfr_desc)))
self.buf.append(self.hw_desc + '\x00' * (256 - len(self.hw_desc)))
self.buf.append(self.sw_desc + '\x00' * (256 - len(self.sw_desc)))
self.buf.append(self.serial_num + '\x00' * (32 - len(self.serial_num)))
self.buf.append(self.dp_desc + '\x00' * (256 - len(self.dp_desc)))
self.buf.set_message_boundaries(1056)
self.assertTupleEqual(self.desc_stats,
ofstats.DescriptionStats.deserialize(self.buf))<|fim▁hole|>class TestFlowStats(unittest2.TestCase):
def setUp(self):
self.buf = buffer.ReceiveBuffer()
self.match = ofmatch.Match(
in_port=0x13, dl_src='\x13\x24\x35\x46\x57\x68',
dl_dst='\x12\x23\x34\x45\x56\x67', dl_vlan=0x11, dl_vlan_pcp=0x22,
dl_type=0x3344, nw_tos=0x80, nw_proto=0xcc,
nw_src=('\xaa\xbb\xcc\xdd', 32), nw_dst=('\x21\x32\x43\x54', 32),
tp_src=0x38, tp_dst=0x49)
self.flow_stats = ofstats.FlowStats(
0xac, self.match, 0x10203040, 0x11223344, 0x1002, 0x0136, 0x0247,
0xffeeddccbbaa9988, 0x42, 0x0153, (
ofaction.ActionOutput(port=0x1234, max_len=0x9abc),
ofaction.ActionSetDlDst(dl_addr='\x12\x34\x56\x78\xab\xcd')))
def _serialize_action(self, a):
a_ser = a.serialize()
header = struct.pack('!HH', a.type, 4 + len(a_ser))
return (header, a_ser)
def _deserialize_action(self, buf):
action_type, action_length = buf.unpack('!HH')
action_class = ofaction.ACTION_CLASSES.get(action_type)
return action_class.deserialize(buf)
def test_serialize(self):
self.assertEqual(
'\x00\x70' '\xac\x00'
+ self.match.serialize()
+ '\x10\x20\x30\x40' '\x11\x22\x33\x44'
'\x10\x02' '\x01\x36' '\x02\x47'
'\x00\x00\x00\x00\x00\x00'
'\xff\xee\xdd\xcc\xbb\xaa\x99\x88'
'\x00\x00\x00\x00\x00\x00\x00\x42'
'\x00\x00\x00\x00\x00\x00\x01\x53'
'\x00\x00\x00\x08'
'\x12\x34\x9a\xbc'
'\x00\x05\x00\x10'
'\x12\x34\x56\x78\xab\xcd\x00\x00\x00\x00\x00\x00',
''.join(self.flow_stats.serialize(self._serialize_action)))
def test_deserialize(self):
self.buf.append('\x00\x70' '\xac\x00')
self.buf.append(self.match.serialize())
self.buf.append(
'\x10\x20\x30\x40' '\x11\x22\x33\x44'
'\x10\x02' '\x01\x36' '\x02\x47'
'\x00\x00\x00\x00\x00\x00'
'\xff\xee\xdd\xcc\xbb\xaa\x99\x88'
'\x00\x00\x00\x00\x00\x00\x00\x42'
'\x00\x00\x00\x00\x00\x00\x01\x53'
'\x00\x00\x00\x08'
'\x12\x34\x9a\xbc'
'\x00\x05\x00\x10'
'\x12\x34\x56\x78\xab\xcd\x00\x00\x00\x00\x00\x00')
self.buf.set_message_boundaries(112)
self.assertTupleEqual(
self.flow_stats,
ofstats.FlowStats.deserialize(self.buf, self._deserialize_action))
class TestTableStats(unittest2.TestCase):
def setUp(self):
self.buf = buffer.ReceiveBuffer()
self.wildcards = ofmatch.Wildcards(
in_port=True, dl_src=True, dl_dst=True, dl_vlan=True,
dl_vlan_pcp=True, dl_type=True, nw_tos=False, nw_proto=False,
nw_src=0, nw_dst=0, tp_src=False, tp_dst=False)
self.table_stats = ofstats.TableStats(
0xac, 'eth_wildcards', self.wildcards, 0x100000, 0x1234, 0x5678,
0x9abcd)
def test_serialize(self):
self.assertEqual('\xac\x00\x00\x00'
'eth_wildcards' + '\x00' * (32 - len('eth_wildcards'))
+ '\x00\x10\x00\x1f'
'\x00\x10\x00\x00' '\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x09\xab\xcd',
''.join(self.table_stats.serialize()))
def test_deserialize(self):
self.buf.append('\xac\x00\x00\x00'
'eth_wildcards')
self.buf.append('\x00' * (32 - len('eth_wildcards')))
self.buf.append('\x00\x10\x00\x1f'
'\x00\x10\x00\x00' '\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x09\xab\xcd')
self.buf.set_message_boundaries(64)
self.assertTupleEqual(self.table_stats,
ofstats.TableStats.deserialize(self.buf))
class TestPortStats(unittest2.TestCase):
def setUp(self):
self.buf = buffer.ReceiveBuffer()
self.port_stats = ofstats.PortStats(
port_no=0xabcd, rx_packets=0x1234, tx_packets=0x5678,
rx_bytes=0x1324, tx_bytes=0x5768, rx_dropped=0x1a2b,
tx_dropped=0x3c4d, rx_errors=0xab12, tx_errors=0xcd34,
rx_frame_err=0x1432, rx_over_err=0x2543, rx_crc_err=0x3654,
collisions=0x4765)
def test_serialize(self):
self.assertEqual('\xab\xcd\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\x13\x24'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x1a\x2b'
'\x00\x00\x00\x00\x00\x00\x3c\x4d'
'\x00\x00\x00\x00\x00\x00\xab\x12'
'\x00\x00\x00\x00\x00\x00\xcd\x34'
'\x00\x00\x00\x00\x00\x00\x14\x32'
'\x00\x00\x00\x00\x00\x00\x25\x43'
'\x00\x00\x00\x00\x00\x00\x36\x54'
'\x00\x00\x00\x00\x00\x00\x47\x65',
self.port_stats.serialize())
def test_serialize_every_counter_unavailable(self):
index = 8
port_stats_ser = ('\xab\xcd\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\x13\x24'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x1a\x2b'
'\x00\x00\x00\x00\x00\x00\x3c\x4d'
'\x00\x00\x00\x00\x00\x00\xab\x12'
'\x00\x00\x00\x00\x00\x00\xcd\x34'
'\x00\x00\x00\x00\x00\x00\x14\x32'
'\x00\x00\x00\x00\x00\x00\x25\x43'
'\x00\x00\x00\x00\x00\x00\x36\x54'
'\x00\x00\x00\x00\x00\x00\x47\x65')
for attr in ('rx_packets', 'tx_packets', 'rx_bytes', 'tx_bytes',
'rx_dropped', 'tx_dropped', 'rx_errors', 'tx_errors',
'rx_frame_err', 'rx_over_err', 'rx_crc_err',
'collisions'):
ps = self.port_stats._replace(**{attr: None}) # set as unavailable
self.assertEqual(port_stats_ser[:index]
+ '\xff\xff\xff\xff\xff\xff\xff\xff'
+ port_stats_ser[index + 8:], ps.serialize())
index += 8
def test_deserialize(self):
self.buf.append('\xab\xcd\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\x13\x24'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x1a\x2b'
'\x00\x00\x00\x00\x00\x00\x3c\x4d'
'\x00\x00\x00\x00\x00\x00\xab\x12'
'\x00\x00\x00\x00\x00\x00\xcd\x34'
'\x00\x00\x00\x00\x00\x00\x14\x32'
'\x00\x00\x00\x00\x00\x00\x25\x43'
'\x00\x00\x00\x00\x00\x00\x36\x54'
'\x00\x00\x00\x00\x00\x00\x47\x65')
self.buf.set_message_boundaries(104)
self.assertTupleEqual(self.port_stats,
ofstats.PortStats.deserialize(self.buf))
def test_deserialize_every_counter_unavailable(self):
index = 8
port_stats_ser = ('\xab\xcd\x00\x00\x00\x00\x00\x00'
'\x00\x00\x00\x00\x00\x00\x12\x34'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\x13\x24'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x1a\x2b'
'\x00\x00\x00\x00\x00\x00\x3c\x4d'
'\x00\x00\x00\x00\x00\x00\xab\x12'
'\x00\x00\x00\x00\x00\x00\xcd\x34'
'\x00\x00\x00\x00\x00\x00\x14\x32'
'\x00\x00\x00\x00\x00\x00\x25\x43'
'\x00\x00\x00\x00\x00\x00\x36\x54'
'\x00\x00\x00\x00\x00\x00\x47\x65')
for attr in ('rx_packets', 'tx_packets', 'rx_bytes', 'tx_bytes',
'rx_dropped', 'tx_dropped', 'rx_errors', 'tx_errors',
'rx_frame_err', 'rx_over_err', 'rx_crc_err',
'collisions'):
self.buf.append(port_stats_ser[:index])
self.buf.append('\xff\xff\xff\xff\xff\xff\xff\xff')
self.buf.append(port_stats_ser[index + 8:])
self.buf.set_message_boundaries(104)
self.assertTupleEqual(self.port_stats._replace(**{attr: None}),
ofstats.PortStats.deserialize(self.buf))
index += 8
class TestQueueStats(unittest2.TestCase):
def setUp(self):
self.buf = buffer.ReceiveBuffer()
self.queue_stats = ofstats.QueueStats(
port_no=0xabcd, queue_id=0x10203040, tx_bytes=0x5768,
tx_packets=0x5678, tx_errors=0xcd34)
def test_serialize(self):
self.assertEqual('\xab\xcd\x00\x00' '\x10\x20\x30\x40'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\xcd\x34',
self.queue_stats.serialize())
def test_deserialize(self):
self.buf.append('\xab\xcd\x00\x00' '\x10\x20\x30\x40'
'\x00\x00\x00\x00\x00\x00\x57\x68'
'\x00\x00\x00\x00\x00\x00\x56\x78'
'\x00\x00\x00\x00\x00\x00\xcd\x34')
self.buf.set_message_boundaries(32)
self.assertTupleEqual(self.queue_stats,
ofstats.QueueStats.deserialize(self.buf))
if __name__ == '__main__':
unittest2.main()<|fim▁end|>
| |
<|file_name|>gsproxy.go<|end_file_name|><|fim▁begin|>package gsproxy
import (
"fmt"
"math/big"
"sync"
"time"
"github.com/gsdocker/gsconfig"
"github.com/gsdocker/gslogger"
"github.com/gsrpc/gorpc"
"github.com/gsrpc/gorpc/handler"
)
var (
dhHandler = "gsproxy-dh"
transProxyHandler = "gsproxy-trans"
tunnelHandler = "gsproxy-tunnel"
)
// Context .
type Context interface {
String() string
// Close close proxy
Close()
// get frontend acceptor
Acceptor() *gorpc.Acceptor
}
// Server server
type Server gorpc.Pipeline
// Client proxy client
type Client interface {
AddService(dispatcher gorpc.Dispatcher)
RemoveService(dispatcher gorpc.Dispatcher)
// TransproxyBind bind transproxy service by id
TransproxyBind(id uint16, server Server)
// Unbind unbind transproxy service by id
TransproxyUnbind(id uint16)
// Device get device name
Device() *gorpc.Device
}
// Proxy .
type Proxy interface {
// Register register current proxy server
Register(context Context) error
// Unregister unregister proxy
Unregister(context Context)
// BindServices add server to proxy session
BindServices(context Context, server Server, services []*gorpc.NamedService) error
// UnbindServices remote server from proxy session
UnbindServices(context Context, server Server)
// AddClient add client to proxy session
AddClient(context Context, client Client) error
// RemoveClient remote client from proxy session
RemoveClient(context Context, client Client)
}
// ProxyBuilder gsproxy builder
type ProxyBuilder struct {
laddrF string // frontend tcp listen address
laddrE string // backend tcp listen address
timeout time.Duration // rpc timeout
dhkeyResolver handler.DHKeyResolver // dhkey resolver
proxy Proxy // proxy provider
}
// BuildProxy create new proxy builder
func BuildProxy(proxy Proxy) *ProxyBuilder {
gStr := gsconfig.String("gsproxy.dhkey.G", "6849211231874234332173554215962568648211715948614349192108760170867674332076420634857278025209099493881977517436387566623834457627945222750416199306671083")
pStr := gsconfig.String("gsproxy.dhkey.P", "13196520348498300509170571968898643110806720751219744788129636326922565480984492185368038375211941297871289403061486510064429072584259746910423138674192557")
G, _ := new(big.Int).SetString(gStr, 0)
P, _ := new(big.Int).SetString(pStr, 0)
return &ProxyBuilder{
laddrF: gsconfig.String("gsproxy.frontend.laddr", ":13512"),
laddrE: gsconfig.String("gsproxy.backend.laddr", ":15827"),
timeout: gsconfig.Seconds("gsproxy.rpc.timeout", 5),
dhkeyResolver: handler.DHKeyResolve(func(device *gorpc.Device) (*handler.DHKey, error) {
return handler.NewDHKey(G, P), nil
}),
proxy: proxy,
}
}
// AddrF set frontend listen address
func (builder *ProxyBuilder) AddrF(laddr string) *ProxyBuilder {
builder.laddrF = laddr
return builder
}
// AddrB set backend listen address
func (builder *ProxyBuilder) AddrB(laddr string) *ProxyBuilder {
builder.laddrE = laddr
return builder
}
// Heartbeat .
func (builder *ProxyBuilder) Heartbeat(timeout time.Duration) *ProxyBuilder {
builder.timeout = timeout
return builder
}
// DHKeyResolver set frontend dhkey resolver
func (builder *ProxyBuilder) DHKeyResolver(dhkeyResolver handler.DHKeyResolver) *ProxyBuilder {
builder.dhkeyResolver = dhkeyResolver
return builder
}
type _Proxy struct {
sync.RWMutex // mutex
gslogger.Log // mixin log APIs
name string //proxy name
frontend *gorpc.Acceptor // frontend
backend *gorpc.Acceptor // backend
proxy Proxy // proxy implement
clients map[string]*_Client // handle agent clients
idgen byte // tunnel id gen
tunnels map[byte]byte // tunnels
}
// Build .
func (builder *ProxyBuilder) Build(name string) Context {
<|fim▁hole|> clients: make(map[string]*_Client),
name: name,
tunnels: make(map[byte]byte),
}
proxy.frontend = gorpc.NewAcceptor(
fmt.Sprintf("%s.frontend", name),
gorpc.BuildPipeline(time.Millisecond*10).Handler(
"gsproxy-profile",
gorpc.ProfileHandler,
).Handler(
"gsproxy-hb",
func() gorpc.Handler {
return handler.NewHeartbeatHandler(builder.timeout)
},
).Handler(
"gsproxy-dh",
func() gorpc.Handler {
return handler.NewCryptoServer(builder.dhkeyResolver)
},
).Handler(
transProxyHandler,
proxy.newTransProxyHandler,
).Handler(
"gsproxy-client",
proxy.newClientHandler,
),
)
proxy.backend = gorpc.NewAcceptor(
fmt.Sprintf("%s.backend", name),
gorpc.BuildPipeline(time.Millisecond*10).Handler(
tunnelHandler,
proxy.newTunnelServer,
),
)
go func() {
if err := gorpc.TCPListen(proxy.backend, builder.laddrE); err != nil {
proxy.E("start agent backend error :%s", err)
}
}()
go func() {
if err := gorpc.TCPListen(proxy.frontend, builder.laddrF); err != nil {
proxy.E("start agent frontend error :%s", err)
}
}()
return proxy
}
func (proxy *_Proxy) Acceptor() *gorpc.Acceptor {
return proxy.frontend
}
func (proxy *_Proxy) String() string {
return proxy.name
}
func (proxy *_Proxy) Close() {
}
func (proxy *_Proxy) removeTunnelID(id byte) {
proxy.Lock()
defer proxy.Unlock()
delete(proxy.tunnels, id)
}
func (proxy *_Proxy) tunnelID() byte {
proxy.Lock()
defer proxy.Unlock()
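// linearly probe for the next unused, non-zero tunnel id; idgen is a byte, so it wraps around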
for {
proxy.idgen++
if _, ok := proxy.tunnels[proxy.idgen]; !ok && proxy.idgen != 0 {
proxy.tunnels[proxy.idgen] = 0
return proxy.idgen
}
}
}
func (proxy *_Proxy) client(device *gorpc.Device) (*_Client, bool) {
proxy.RLock()
defer proxy.RUnlock()
client, ok := proxy.clients[device.String()]
return client, ok
}
func (proxy *_Proxy) addClient(client *_Client) {
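// a device may only hold one session: unregister and close any existing client before adding the new one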
proxy.Lock()
defer proxy.Unlock()
if client, ok := proxy.clients[client.device.String()]; ok {
proxy.proxy.RemoveClient(proxy, client)
client.Close()
}
proxy.clients[client.device.String()] = client
proxy.proxy.AddClient(proxy, client)
}
func (proxy *_Proxy) removeClient(client *_Client) {
proxy.Lock()
defer proxy.Unlock()
device := client.device
if old, ok := proxy.clients[device.String()]; ok && client == old {
proxy.proxy.RemoveClient(proxy, client)
}
}<|fim▁end|>
|
proxy := &_Proxy{
Log: gslogger.Get("gsproxy"),
proxy: builder.proxy,
|
<|file_name|>20181123102419-change-user-clients-sso-id-to-has-login.ts<|end_file_name|><|fim▁begin|>export const up = async function(db: any): Promise<any> {
return db.runSql(
`
ALTER TABLE user_clients ADD COLUMN has_login BOOLEAN DEFAULT FALSE;
<|fim▁hole|> `
);
};
export const down = function(): Promise<any> {
return null;
};<|fim▁end|>
|
UPDATE user_clients SET has_login = sso_id IS NOT NULL;
|
<|file_name|>TransactionService.java<|end_file_name|><|fim▁begin|>package pl.garciapl.banknow.service;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import pl.garciapl.banknow.model.Transaction;
import pl.garciapl.banknow.service.exceptions.GenericBankNowException;
import pl.garciapl.banknow.service.exceptions.InsufficientFundsException;
/**
* TransactionService - interface for TransactionServiceImpl
*<|fim▁hole|>
List<Transaction> getAllTransactions();
void makeDeposit(BigInteger account, BigDecimal amount);
void makeTransfer(BigInteger sender, BigInteger recipient, BigDecimal amount)
throws InsufficientFundsException, GenericBankNowException;
}<|fim▁end|>
|
* @author lukasz
*/
public interface TransactionService {
|
<|file_name|>0002_contact_project_socialsite.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-07-13 18:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('myblog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Contact',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('message_from_me', models.TextField()),
('subject', models.CharField(max_length=33)),
('message_from_user', models.TextField()),
],<|fim▁hole|> ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=50)),
('link', models.URLField()),
('image', models.ImageField(default=None, upload_to='myblog/image/project')),
('detail', models.TextField()),
('created_on', models.DateTimeField()),
],
),
migrations.CreateModel(
name='SocialSite',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('site_name', models.CharField(max_length=10)),
('link', models.URLField()),
],
options={
'verbose_name_plural': 'Social Sites',
},
),
]<|fim▁end|>
|
),
migrations.CreateModel(
name='Project',
fields=[
|
<|file_name|>test_general_describer.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import division, print_function, unicode_literals, \
absolute_import
import unittest
import os
import json
import numpy as np
import pandas as pd
from pymatgen import Structure
from veidt.describer.general import FuncGenerator, MultiDescriber
from veidt.describer.structural_describer import DistinctSiteProperty
class GeneratorTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.data = np.random.rand(100, 3) * 10 - 5
cls.df = pd.DataFrame(cls.data, columns=["x", "y", "z"])
func_dict = {"sin": "np.sin",
"sum": "lambda d: d.sum(axis=1)",
"nest": "lambda d: np.log(np.exp(d['x']))"}
cls.generator = FuncGenerator(func_dict=func_dict)
def test_describe(self):<|fim▁hole|> np.testing.assert_array_equal(np.sum(self.data, axis=1),
results["sum"])
np.testing.assert_array_almost_equal(self.data[:, 0],
results["nest"])
def test_serialize(self):
json_str = json.dumps(self.generator.as_dict())
recover = FuncGenerator.from_dict(json.loads(json_str))
class MultiDescriberTest(unittest.TestCase):
def test_describe(self):
li2o = Structure.from_file(os.path.join(os.path.dirname(__file__),
"../../tests/Li2O.cif"))
na2o = Structure.from_file(os.path.join(os.path.dirname(__file__),
"../../tests/Na2O.cif"))
d1 = DistinctSiteProperty(['8c', '4a'], ["Z", "atomic_radius"])
d2 = FuncGenerator(func_dict={"exp": "np.exp"}, append=False)
d = MultiDescriber([d1, d2])
results = d.describe(li2o)
self.assertAlmostEqual(results.iloc[0]["exp 8c-Z"], np.exp(3))
self.assertAlmostEqual(results.iloc[0]["exp 8c-atomic_radius"],
np.exp(1.45))
df = d.describe_all([li2o, na2o])
self.assertAlmostEqual(df.iloc[0]["exp 8c-Z"], np.exp(3))
self.assertAlmostEqual(df.iloc[1]["exp 8c-Z"], np.exp(11))
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
results = self.generator.describe(self.df)
np.testing.assert_array_equal(np.sin(self.data),
results[["sin x", "sin y", "sin z"]])
|
<|file_name|>not-found.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-page-not-found',
templateUrl: './not-found.component.html',
styleUrls: ['./not-found.component.scss'],
})
export class NotFoundComponent implements OnInit {<|fim▁hole|> constructor() { }
ngOnInit() {
}
}<|fim▁end|>
| |
<|file_name|>ParseOptionsFile.ts<|end_file_name|><|fim▁begin|>import { Command } from '@dicy/types'
import Rule from '../Rule'
import { Action } from '../types'
export default class ParseOptionsFile extends Rule {
static commands: Set<Command> = new Set<Command>(['load'])
static defaultActions: Action[] = ['parse']
static description: string = 'Parses the YAML option file.'
async preEvaluate () {
await this.getResolvedInputs(['$CONFIG_HOME/dicy/config.yaml', 'dicy.yaml', '$NAME.yaml'])
if (this.inputs.length === 0) this.actions.delete('run')
}
<|fim▁hole|> if (output) {
output.value = await input.readYaml()
}
}
return true
}
}<|fim▁end|>
|
async parse () {
for (const input of this.inputs) {
const output = await this.getOutput(`${input.filePath}-ParsedYAML`)
|
<|file_name|>test_sob.py<|end_file_name|><|fim▁begin|># Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from __future__ import division, absolute_import
import os
import sys
from textwrap import dedent
from twisted.trial import unittest
from twisted.persisted import sob
from twisted.python import components
from twisted.persisted.styles import Ephemeral
class Dummy(components.Componentized):
pass
objects = [
1,
"hello",
(1, "hello"),
[1, "hello"],
{1:"hello"},
]
class FakeModule(object):
pass
class PersistTests(unittest.TestCase):
def testStyles(self):
for o in objects:
p = sob.Persistent(o, '')
for style in 'source pickle'.split():
p.setStyle(style)
p.save(filename='persisttest.'+style)
o1 = sob.load('persisttest.'+style, style)
self.assertEqual(o, o1)
def testStylesBeingSet(self):
o = Dummy()
o.foo = 5
o.setComponent(sob.IPersistable, sob.Persistent(o, 'lala'))
for style in 'source pickle'.split():
sob.IPersistable(o).setStyle(style)
sob.IPersistable(o).save(filename='lala.'+style)
o1 = sob.load('lala.'+style, style)
self.assertEqual(o.foo, o1.foo)
self.assertEqual(sob.IPersistable(o1).style, style)
def testPassphraseError(self):
"""
Calling save() with a passphrase is an error.
"""
p = sob.Persistant(None, 'object')
self.assertRaises(
TypeError, p.save, 'filename.pickle', passphrase='abc')
def testNames(self):
o = [1,2,3]
p = sob.Persistent(o, 'object')
for style in 'source pickle'.split():
p.setStyle(style)
p.save()
o1 = sob.load('object.ta'+style[0], style)
self.assertEqual(o, o1)
for tag in 'lala lolo'.split():
p.save(tag)
o1 = sob.load('object-'+tag+'.ta'+style[0], style)
self.assertEqual(o, o1)
def testPython(self):
with open("persisttest.python", 'w') as f:
f.write('foo=[1,2,3] ')
o = sob.loadValueFromFile('persisttest.python', 'foo')
self.assertEqual(o, [1,2,3])
def testTypeGuesser(self):
self.assertRaises(KeyError, sob.guessType, "file.blah")
self.assertEqual('python', sob.guessType("file.py"))<|fim▁hole|> self.assertEqual('python', sob.guessType("file.tac"))
self.assertEqual('python', sob.guessType("file.etac"))
self.assertEqual('pickle', sob.guessType("file.tap"))
self.assertEqual('pickle', sob.guessType("file.etap"))
self.assertEqual('source', sob.guessType("file.tas"))
self.assertEqual('source', sob.guessType("file.etas"))
def testEverythingEphemeralGetattr(self):
"""
L{_EverythingEphermal.__getattr__} will proxy the __main__ module as an
L{Ephemeral} object, and during load will be transparent, but after
load will return L{Ephemeral} objects from any accessed attributes.
"""
self.fakeMain.testMainModGetattr = 1
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_getattr')
global mainWhileLoading
mainWhileLoading = None
with open(filename, "w") as f:
f.write(dedent("""
app = []
import __main__
app.append(__main__.testMainModGetattr == 1)
try:
__main__.somethingElse
except AttributeError:
app.append(True)
else:
app.append(False)
from twisted.test import test_sob
test_sob.mainWhileLoading = __main__
"""))
loaded = sob.load(filename, 'source')
self.assertIsInstance(loaded, list)
self.assertTrue(loaded[0], "Expected attribute not set.")
self.assertTrue(loaded[1], "Unexpected attribute set.")
self.assertIsInstance(mainWhileLoading, Ephemeral)
self.assertIsInstance(mainWhileLoading.somethingElse, Ephemeral)
del mainWhileLoading
def testEverythingEphemeralSetattr(self):
"""
Verify that _EverythingEphemeral.__setattr__ won't affect __main__.
"""
self.fakeMain.testMainModSetattr = 1
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_setattr')
with open(filename, 'w') as f:
f.write('import __main__\n')
f.write('__main__.testMainModSetattr = 2\n')
f.write('app = None\n')
sob.load(filename, 'source')
self.assertEqual(self.fakeMain.testMainModSetattr, 1)
def testEverythingEphemeralException(self):
"""
Test that an exception during load() won't cause _EE to mask __main__
"""
dirname = self.mktemp()
os.mkdir(dirname)
filename = os.path.join(dirname, 'persisttest.ee_exception')
with open(filename, 'w') as f:
f.write('raise ValueError\n')
self.assertRaises(ValueError, sob.load, filename, 'source')
self.assertEqual(type(sys.modules['__main__']), FakeModule)
def setUp(self):
"""
Replace the __main__ module with a fake one, so that it can be mutated
in tests
"""
self.realMain = sys.modules['__main__']
self.fakeMain = sys.modules['__main__'] = FakeModule()
def tearDown(self):
"""
Restore __main__ to its original value
"""
sys.modules['__main__'] = self.realMain<|fim▁end|>
| |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import datetime
from flask import g
from sqlalchemy import (
Boolean,
Column,
DateTime,
ForeignKey,
Integer,
Sequence,
String,
Table,
UniqueConstraint,
)
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import backref, relationship
from ... import Model
from ..._compat import as_unicode
_dont_audit = False
class Permission(Model):
__tablename__ = "ab_permission"
id = Column(Integer, Sequence("ab_permission_id_seq"), primary_key=True)
name = Column(String(100), unique=True, nullable=False)
def __repr__(self):
return self.name
class ViewMenu(Model):
__tablename__ = "ab_view_menu"
id = Column(Integer, Sequence("ab_view_menu_id_seq"), primary_key=True)
name = Column(String(250), unique=True, nullable=False)
def __eq__(self, other):
return (isinstance(other, self.__class__)) and (self.name == other.name)
def __neq__(self, other):
return self.name != other.name
def __repr__(self):
return self.name
assoc_permissionview_role = Table(
"ab_permission_view_role",
Model.metadata,
Column("id", Integer, Sequence("ab_permission_view_role_id_seq"), primary_key=True),
Column("permission_view_id", Integer, ForeignKey("ab_permission_view.id")),
Column("role_id", Integer, ForeignKey("ab_role.id")),
UniqueConstraint("permission_view_id", "role_id"),
)
class Role(Model):
__tablename__ = "ab_role"
id = Column(Integer, Sequence("ab_role_id_seq"), primary_key=True)
name = Column(String(64), unique=True, nullable=False)
permissions = relationship(
"PermissionView", secondary=assoc_permissionview_role, backref="role"
)
def __repr__(self):
return self.name
class PermissionView(Model):
__tablename__ = "ab_permission_view"
__table_args__ = (UniqueConstraint("permission_id", "view_menu_id"),)
id = Column(Integer, Sequence("ab_permission_view_id_seq"), primary_key=True)
permission_id = Column(Integer, ForeignKey("ab_permission.id"))
permission = relationship("Permission")
view_menu_id = Column(Integer, ForeignKey("ab_view_menu.id"))
view_menu = relationship("ViewMenu")
def __repr__(self):
return str(self.permission).replace("_", " ") + " on " + str(self.view_menu)
assoc_user_role = Table(
"ab_user_role",
Model.metadata,
Column("id", Integer, Sequence("ab_user_role_id_seq"), primary_key=True),
Column("user_id", Integer, ForeignKey("ab_user.id")),
Column("role_id", Integer, ForeignKey("ab_role.id")),
UniqueConstraint("user_id", "role_id"),
)
class User(Model):
__tablename__ = "ab_user"
id = Column(Integer, Sequence("ab_user_id_seq"), primary_key=True)
first_name = Column(String(64), nullable=False)
last_name = Column(String(64), nullable=False)
username = Column(String(64), unique=True, nullable=False)
password = Column(String(256))
active = Column(Boolean)
email = Column(String(64), unique=True, nullable=False)
last_login = Column(DateTime)
login_count = Column(Integer)
fail_login_count = Column(Integer)
roles = relationship("Role", secondary=assoc_user_role, backref="user")
created_on = Column(DateTime, default=datetime.datetime.now, nullable=True)
changed_on = Column(DateTime, default=datetime.datetime.now, nullable=True)
@declared_attr
def created_by_fk(self):
return Column(
Integer, ForeignKey("ab_user.id"), default=self.get_user_id, nullable=True
)
@declared_attr
def changed_by_fk(self):
return Column(
Integer, ForeignKey("ab_user.id"), default=self.get_user_id, nullable=True
)
created_by = relationship(
"User",
backref=backref("created", uselist=True),
remote_side=[id],
primaryjoin="User.created_by_fk == User.id",
uselist=False,
)
changed_by = relationship(
"User",
backref=backref("changed", uselist=True),
remote_side=[id],
primaryjoin="User.changed_by_fk == User.id",
uselist=False,
)
@classmethod
def get_user_id(cls):
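# Default for the created_by_fk / changed_by_fk columns: the id of the user bound to the current request (g.user), or None outside a request.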
try:
return g.user.id
except Exception:
return None
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return self.active
@property
def is_anonymous(self):
return False
def get_id(self):
return as_unicode(self.id)
def get_full_name(self):
return u"{0} {1}".format(self.first_name, self.last_name)
def __repr__(self):
return self.get_full_name()
class RegisterUser(Model):
__tablename__ = "ab_register_user"<|fim▁hole|> password = Column(String(256))
email = Column(String(64), nullable=False)
registration_date = Column(DateTime, default=datetime.datetime.now, nullable=True)
registration_hash = Column(String(256))<|fim▁end|>
|
id = Column(Integer, Sequence("ab_register_user_id_seq"), primary_key=True)
first_name = Column(String(64), nullable=False)
last_name = Column(String(64), nullable=False)
username = Column(String(64), unique=True, nullable=False)
|
<|file_name|>GameSystems_service_binding.cc<|end_file_name|><|fim▁begin|>// This file is part of SWGANH which is released under the MIT license.
// See file LICENSE or go to http://swganh.com/LICENSE
#include "swganh_core/gamesystems/gamesystems_service_binding.h"
BOOST_PYTHON_MODULE(py_gamesystems)<|fim▁hole|>{
docstring_options local_docstring_options(true, true, false);
exportGameSystemsService();
}<|fim▁end|>
| |
<|file_name|>validators.py<|end_file_name|><|fim▁begin|># Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
from oslo_utils import netutils
from django.core.exceptions import ValidationError
from django.core import validators
from django.utils.translation import gettext_lazy as _
from horizon import conf
<|fim▁hole|>def validate_port_range(port):
if not netutils.is_valid_port(port):
raise ValidationError(_("Not a valid port number"))
def validate_icmp_type_range(icmp_type):
if not netutils.is_valid_icmp_type(icmp_type):
if icmp_type == -1:
return
raise ValidationError(_("Not a valid ICMP type"))
def validate_icmp_code_range(icmp_code):
if not netutils.is_valid_icmp_code(icmp_code):
if icmp_code == -1:
return
raise ValidationError(_("Not a valid ICMP code"))
def validate_ip_protocol(ip_proto):
if ip_proto < -1 or ip_proto > 255:
raise ValidationError(_("Not a valid IP protocol number"))
def password_validator():
return conf.HORIZON_CONFIG["password_validator"]["regex"]
def password_validator_msg():
return conf.HORIZON_CONFIG["password_validator"]["help_text"]
def validate_port_or_colon_separated_port_range(port_range):
"""Accepts a port number or a single-colon separated range."""
if port_range.count(':') > 1:
raise ValidationError(_("One colon allowed in port range"))
ports = port_range.split(':')
for port in ports:
validate_port_range(port)
def validate_metadata(value):
error_msg = _('Invalid metadata entry. Use comma-separated'
' key=value pairs')
if value:
specs = value.split(",")
for spec in specs:
keyval = spec.split("=")
# ensure both sides of "=" exist, but allow blank value
if not len(keyval) == 2 or not keyval[0]:
raise ValidationError(error_msg)
# Same as POSIX [:print:]. Accordingly, diacritics are disallowed.
PRINT_REGEX = re.compile(r'^[\x20-\x7E]*$')
validate_printable_ascii = validators.RegexValidator(
PRINT_REGEX,
_("The string may only contain ASCII printable characters."),
"invalid_characters")<|fim▁end|>
| |
<|file_name|>find_error.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Lexing error finder
~~~~~~~~~~~~~~~~~~~
For the source files given on the command line, display
the text where Error tokens are being generated, along
with some context.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""<|fim▁hole|>try:
import pygments
except ImportError:
# try parent path
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pygments.lexer import RegexLexer
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error, Text, _TokenType
class DebuggingRegexLexer(RegexLexer):
"""Make the state stack, position and current match instance attributes."""
def get_tokens_unprocessed(self, text, stack=('root',)):
"""
Split ``text`` into (tokentype, text) pairs.
``stack`` is the initial stack (default: ``['root']``)
"""
self.pos = 0
tokendefs = self._tokens
self.statestack = list(stack)
statetokens = tokendefs[self.statestack[-1]]
while 1:
for rexmatch, action, new_state in statetokens:
self.m = m = rexmatch(text, self.pos)
if m:
if type(action) is _TokenType:
yield self.pos, action, m.group()
else:
for item in action(self, m):
yield item
self.pos = m.end()
if new_state is not None:
# state transition
if isinstance(new_state, tuple):
for state in new_state:
if state == '#pop':
self.statestack.pop()
elif state == '#push':
self.statestack.append(self.statestack[-1])
else:
self.statestack.append(state)
elif isinstance(new_state, int):
# pop
del self.statestack[new_state:]
elif new_state == '#push':
self.statestack.append(self.statestack[-1])
else:
assert False, 'wrong state def: %r' % new_state
statetokens = tokendefs[self.statestack[-1]]
break
else:
try:
if text[self.pos] == '\n':
# at EOL, reset state to 'root'
self.pos += 1
self.statestack = ['root']
statetokens = tokendefs['root']
yield self.pos, Text, u'\n'
continue
yield self.pos, Error, text[self.pos]
self.pos += 1
except IndexError:
break
def main(fn, lexer=None):
if lexer is not None:
lx = get_lexer_by_name(lexer)
else:
try:
lx = get_lexer_for_filename(os.path.basename(fn))
except ValueError:
try:
name, rest = fn.split('_', 1)
lx = get_lexer_by_name(name)
except ValueError:
raise AssertionError('no lexer found for file %r' % fn)
debug_lexer = False
# does not work for e.g. ExtendedRegexLexers
if lx.__class__.__bases__ == (RegexLexer,):
lx.__class__.__bases__ = (DebuggingRegexLexer,)
debug_lexer = True
lno = 1
text = file(fn, 'U').read()
text = text.strip('\n') + '\n'
text = text.decode('latin1')
tokens = []
states = []
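# local helper: pretty-print one (token, state) pair with aligned columns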
def show_token(tok, state):
reprs = map(repr, tok)
print ' ' + reprs[1] + ' ' + ' ' * (29-len(reprs[1])) + reprs[0],
if debug_lexer:
print ' ' + ' ' * (29-len(reprs[0])) + repr(state),
print
for type, val in lx.get_tokens(text):
lno += val.count('\n')
if type == Error:
print 'Error parsing', fn, 'on line', lno
print 'Previous tokens' + (debug_lexer and ' and states' or '') + ':'
if showall:
for tok, state in zip(tokens, states):
show_token(tok, state)
else:
for i in range(len(tokens) - num, len(tokens)):
show_token(tokens[i], states[i])
print 'Error token:'
l = len(repr(val))
print ' ' + repr(val),
if debug_lexer and hasattr(lx, 'statestack'):
print ' ' * (60-l) + repr(lx.statestack),
print
print
return 1
tokens.append((type,val))
if debug_lexer:
if hasattr(lx, 'statestack'):
states.append(lx.statestack[:])
else:
states.append(None)
if showall:
for tok, state in zip(tokens, states):
show_token(tok, state)
return 0
num = 10
showall = False
lexer = None
if __name__ == '__main__':
import getopt
opts, args = getopt.getopt(sys.argv[1:], 'n:l:a')
for opt, val in opts:
if opt == '-n':
num = int(val)
elif opt == '-a':
showall = True
elif opt == '-l':
lexer = val
ret = 0
for f in args:
ret += main(f, lexer)
sys.exit(bool(ret))<|fim▁end|>
|
import sys, os
|
<|file_name|>user.rs<|end_file_name|><|fim▁begin|>use super::super::schema::user;
#[derive(Queryable, Deserialize, Serialize, Clone)]<|fim▁hole|> pub username: String,
pub email: String,
pub password: String,
}
#[derive(Insertable)]
#[table_name="user"]
pub struct NewUser<'a> {
pub username: &'a str,
pub email: &'a str,
pub password: &'a str,
}<|fim▁end|>
|
pub struct User {
pub id: i32,
|
<|file_name|>cloudflare_dns.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016 Michael Gruener <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: cloudflare_dns
author: "Michael Gruener (@mgruener)"
requirements:
- "python >= 2.6"
version_added: "2.1"
short_description: manage Cloudflare DNS records
description:
- "Manages dns records via the Cloudflare API, see the docs: U(https://api.cloudflare.com/)"
options:
account_api_token:
description:
- >
Account API token. You can obtain your API key from the bottom of the Cloudflare 'My Account' page, found here: U(https://dash.cloudflare.com/)
required: true
account_email:
description:
- "Account email."
required: true
algorithm:
description:
- Algorithm number. Required for C(type=DS) and C(type=SSHFP) when C(state=present).
type: int
version_added: 2.7
cert_usage:
description:
- Certificate usage number. Required for C(type=TLSA) when C(state=present).
choices: [ 0, 1, 2, 3 ]
type: int
version_added: 2.7
hash_type:
description:
- Hash type number. Required for C(type=DS), C(type=SSHFP) and C(type=TLSA) when C(state=present).
choices: [ 1, 2 ]
type: int
version_added: 2.7
key_tag:
description:
- DNSSEC key tag. Needed for C(type=DS) when C(state=present).
type: int
version_added: 2.7
port:
description: Service port. Required for C(type=SRV) and C(type=TLSA).
priority:
description: Record priority. Required for C(type=MX) and C(type=SRV)
default: "1"
proto:
description:
- Service protocol. Required for C(type=SRV) and C(type=TLSA).
- Common values are tcp and udp.
- Before Ansible 2.6 only tcp and udp were available.
proxied:
description: Proxy through cloudflare network or just use DNS
type: bool
default: 'no'
version_added: "2.3"
record:
description:
- Record to add. Required if C(state=present). Default is C(@) (e.g. the zone name)
default: "@"
aliases: [ "name" ]
selector:
description:
- Selector number. Required for C(type=TLSA) when C(state=present).
choices: [ 0, 1 ]
type: int
version_added: 2.7
service:
description: Record service. Required for C(type=SRV)
solo:
description:
- Whether the record should be the only one for that record type and record name. Only use with C(state=present)
- This will delete all other records with the same record name and type.
state:
description:
- Whether the record(s) should exist or not
choices: [ 'present', 'absent' ]
default: present
timeout:
description:
- Timeout for Cloudflare API calls
default: 30
ttl:
description:
- The TTL to give the new record. Must be between 120 and 2,147,483,647 seconds, or 1 for automatic.
default: 1 (automatic)
type:
description:
- The type of DNS record to create. Required if C(state=present)
- C(type=DS), C(type=SSHFP) and C(type=TLSA) added in Ansible 2.7.
choices: [ 'A', 'AAAA', 'CNAME', 'TXT', 'SRV', 'MX', 'NS', 'DS', 'SPF', 'SSHFP', 'TLSA' ]
value:
description:
- The record value. Required for C(state=present)
aliases: [ "content" ]
weight:
description: Service weight. Required for C(type=SRV)
default: "1"
zone:
description:
- The name of the Zone to work with (e.g. "example.com"). The Zone must already exist.
required: true
aliases: ["domain"]
'''
EXAMPLES = '''
# create a test.my.com A record to point to 127.0.0.1
- cloudflare_dns:
zone: my.com
record: test
type: A
value: 127.0.0.1
account_email: [email protected]
account_api_token: dummyapitoken
register: record
# create a my.com CNAME record to example.com
- cloudflare_dns:
zone: my.com
type: CNAME
value: example.com
state: present
account_email: [email protected]
account_api_token: dummyapitoken
# change it's ttl
- cloudflare_dns:
zone: my.com
type: CNAME
value: example.com
ttl: 600
state: present
account_email: [email protected]
account_api_token: dummyapitoken
# and delete the record
- cloudflare_dns:
zone: my.com
type: CNAME
value: example.com
state: absent
account_email: [email protected]
account_api_token: dummyapitoken
# create a my.com CNAME record to example.com and proxy through cloudflare's network
- cloudflare_dns:
zone: my.com
type: CNAME
value: example.com
state: present
proxied: yes
account_email: [email protected]
account_api_token: dummyapitoken
# create TXT record "test.my.com" with value "unique value"
# delete all other TXT records named "test.my.com"
- cloudflare_dns:
domain: my.com
record: test
type: TXT
value: unique value
state: present
solo: true
account_email: [email protected]
account_api_token: dummyapitoken
# create a SRV record _foo._tcp.my.com
- cloudflare_dns:
domain: my.com
service: foo
proto: tcp
port: 3500
priority: 10
weight: 20
type: SRV
value: fooserver.my.com
# create a SSHFP record login.example.com
- cloudflare_dns:
zone: example.com
record: login
type: SSHFP
algorithm: 4
hash_type: 2
value: 9dc1d6742696d2f51ca1f1a78b3d16a840f7d111eb9454239e70db31363f33e1
# create a TLSA record _25._tcp.mail.example.com
- cloudflare_dns:
zone: example.com
record: mail
port: 25
proto: tcp
type: TLSA
cert_usage: 3
selector: 1
hash_type: 1
value: 6b76d034492b493e15a7376fccd08e63befdad0edab8e442562f532338364bf3
# Create a DS record for subdomain.example.com
- cloudflare_dns:
zone: example.com
record: subdomain
type: DS
key_tag: 5464
algorithm: 8
hash_type: 2
value: B4EB5AC4467D2DFB3BAF9FB9961DC1B6FED54A58CDFAA3E465081EC86F89BFAB
'''
RETURN = '''
record:
description: dictionary containing the record data
returned: success, except on record deletion
type: complex
contains:
content:
description: the record content (details depend on record type)
returned: success
type: string
sample: 192.0.2.91
created_on:
description: the record creation date
returned: success
type: string
sample: 2016-03-25T19:09:42.516553Z
data:
description: additional record data
returned: success, if type is SRV, DS, SSHFP or TLSA
type: dictionary
sample: {
name: "jabber",
port: 8080,
priority: 10,
proto: "_tcp",
service: "_xmpp",
target: "jabberhost.sample.com",
weight: 5,
}
id:
description: the record id
returned: success
type: string
sample: f9efb0549e96abcb750de63b38c9576e
locked:
description: No documentation available
returned: success
type: boolean
sample: False
meta:
description: No documentation available
returned: success
type: dictionary
sample: { auto_added: false }
modified_on:
description: record modification date
returned: success
type: string
sample: 2016-03-25T19:09:42.516553Z
name:
description: the record name as FQDN (including _service and _proto for SRV)
returned: success
type: string
sample: www.sample.com
priority:
description: priority of the MX record
returned: success, if type is MX
type: int
sample: 10
proxiable:
description: whether this record can be proxied through cloudflare
returned: success
type: boolean
sample: False
proxied:
description: whether the record is proxied through cloudflare
returned: success
type: boolean
sample: False
ttl:
description: the time-to-live for the record
returned: success
type: int
sample: 300
type:
description: the record type
returned: success
type: string
sample: A
zone_id:
description: the id of the zone containing the record
returned: success
type: string
sample: abcede0bf9f0066f94029d2e6b73856a
zone_name:
description: the name of the zone containing the record
returned: success
type: string
sample: sample.com
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.urls import fetch_url
def lowercase_string(param):
if not isinstance(param, str):
return param
return param.lower()
class CloudflareAPI(object):
cf_api_endpoint = 'https://api.cloudflare.com/client/v4'
changed = False
def __init__(self, module):
self.module = module
self.account_api_token = module.params['account_api_token']
self.account_email = module.params['account_email']
self.algorithm = module.params['algorithm']
self.cert_usage = module.params['cert_usage']
self.hash_type = module.params['hash_type']
self.key_tag = module.params['key_tag']
self.port = module.params['port']
self.priority = module.params['priority']
self.proto = lowercase_string(module.params['proto'])
self.proxied = module.params['proxied']
self.selector = module.params['selector']
self.record = lowercase_string(module.params['record'])
self.service = lowercase_string(module.params['service'])
self.is_solo = module.params['solo']
self.state = module.params['state']
self.timeout = module.params['timeout']
self.ttl = module.params['ttl']
self.type = module.params['type']
self.value = module.params['value']
self.weight = module.params['weight']
self.zone = lowercase_string(module.params['zone'])
if self.record == '@':
self.record = self.zone
if (self.type in ['CNAME', 'NS', 'MX', 'SRV']) and (self.value is not None):
self.value = self.value.rstrip('.').lower()
if (self.type == 'AAAA') and (self.value is not None):
self.value = self.value.lower()
if (self.type == 'SRV'):
if (self.proto is not None) and (not self.proto.startswith('_')):
self.proto = '_' + self.proto
if (self.service is not None) and (not self.service.startswith('_')):
self.service = '_' + self.service
if (self.type == 'TLSA'):
if (self.proto is not None) and (not self.proto.startswith('_')):
self.proto = '_' + self.proto
if (self.port is not None):
self.port = '_' + str(self.port)
if not self.record.endswith(self.zone):
self.record = self.record + '.' + self.zone
if (self.type == 'DS'):
if self.record == self.zone:
self.module.fail_json(msg="DS records only apply to subdomains.")
def _cf_simple_api_call(self, api_call, method='GET', payload=None):
headers = {'X-Auth-Email': self.account_email,
'X-Auth-Key': self.account_api_token,
'Content-Type': 'application/json'}
data = None
if payload:
try:
data = json.dumps(payload)
except Exception as e:
self.module.fail_json(msg="Failed to encode payload as JSON: %s " % to_native(e))
resp, info = fetch_url(self.module,
self.cf_api_endpoint + api_call,
headers=headers,
data=data,
method=method,
timeout=self.timeout)
if info['status'] not in [200, 304, 400, 401, 403, 429, 405, 415]:
self.module.fail_json(msg="Failed API call {0}; got unexpected HTTP code {1}".format(api_call, info['status']))
error_msg = ''
if info['status'] == 401:
# Unauthorized
error_msg = "API user does not have permission; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
elif info['status'] == 403:
# Forbidden
error_msg = "API request not authenticated; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
elif info['status'] == 429:
# Too many requests
error_msg = "API client is rate limited; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
elif info['status'] == 405:
# Method not allowed
error_msg = "API incorrect HTTP method provided; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
elif info['status'] == 415:
# Unsupported Media Type
error_msg = "API request is not valid JSON; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
elif info['status'] == 400:
# Bad Request
error_msg = "API bad request; Status: {0}; Method: {1}: Call: {2}".format(info['status'], method, api_call)
result = None
try:
content = resp.read()
except AttributeError:
if info['body']:
content = info['body']
else:
error_msg += "; The API response was empty"
if content:
try:
result = json.loads(to_text(content, errors='surrogate_or_strict'))
except (getattr(json, 'JSONDecodeError', ValueError)) as e:
error_msg += "; Failed to parse API response with error {0}: {1}".format(to_native(e), content)
# Without a valid/parsed JSON response no more error processing can be done
if result is None:
self.module.fail_json(msg=error_msg)
if not result['success']:
error_msg += "; Error details: "
for error in result['errors']:
error_msg += "code: {0}, error: {1}; ".format(error['code'], error['message'])
if 'error_chain' in error:
for chain_error in error['error_chain']:
error_msg += "code: {0}, error: {1}; ".format(chain_error['code'], chain_error['message'])
self.module.fail_json(msg=error_msg)
return result, info['status']
def _cf_api_call(self, api_call, method='GET', payload=None):
result, status = self._cf_simple_api_call(api_call, method, payload)
data = result['result']
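# list responses may be paginated; fetch the remaining pages and extend the result list before returning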
if 'result_info' in result:
pagination = result['result_info']
if pagination['total_pages'] > 1:
next_page = int(pagination['page']) + 1
parameters = ['page={0}'.format(next_page)]
# strip "page" parameter from call parameters (if there are any)
if '?' in api_call:
raw_api_call, query = api_call.split('?', 1)
parameters += [param for param in query.split('&') if not param.startswith('page')]
else:
raw_api_call = api_call
while next_page <= pagination['total_pages']:
raw_api_call += '?' + '&'.join(parameters)
result, status = self._cf_simple_api_call(raw_api_call, method, payload)
data += result['result']
next_page += 1
return data, status
def _get_zone_id(self, zone=None):
if not zone:
zone = self.zone
zones = self.get_zones(zone)
if len(zones) > 1:
self.module.fail_json(msg="More than one zone matches {0}".format(zone))
if len(zones) < 1:
self.module.fail_json(msg="No zone found with name {0}".format(zone))
return zones[0]['id']
def get_zones(self, name=None):
if not name:
name = self.zone
param = ''
if name:
param = '?' + urlencode({'name': name})
zones, status = self._cf_api_call('/zones' + param)
return zones
def get_dns_records(self, zone_name=None, type=None, record=None, value=''):
if not zone_name:
zone_name = self.zone
if not type:
type = self.type
if not record:
record = self.record
# necessary because None as value means to override user
# set module value
if (not value) and (value is not None):
value = self.value
zone_id = self._get_zone_id()
api_call = '/zones/{0}/dns_records'.format(zone_id)
query = {}
if type:
query['type'] = type
if record:
query['name'] = record
if value:
query['content'] = value
if query:
api_call += '?' + urlencode(query)
records, status = self._cf_api_call(api_call)
return records
def delete_dns_records(self, **kwargs):
params = {}
for param in ['port', 'proto', 'service', 'solo', 'type', 'record', 'value', 'weight', 'zone',
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag']:
if param in kwargs:
params[param] = kwargs[param]
else:
params[param] = getattr(self, param)
records = []
content = params['value']
search_record = params['record']
if params['type'] == 'SRV':
if not (params['value'] is None or params['value'] == ''):
content = str(params['weight']) + '\t' + str(params['port']) + '\t' + params['value']
search_record = params['service'] + '.' + params['proto'] + '.' + params['record']
elif params['type'] == 'DS':
if not (params['value'] is None or params['value'] == ''):
content = str(params['key_tag']) + '\t' + str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
elif params['type'] == 'SSHFP':
if not (params['value'] is None or params['value'] == ''):
content = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
elif params['type'] == 'TLSA':
if not (params['value'] is None or params['value'] == ''):
content = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
search_record = params['port'] + '.' + params['proto'] + '.' + params['record']
if params['solo']:
search_value = None
else:
search_value = content
records = self.get_dns_records(params['zone'], params['type'], search_record, search_value)
for rr in records:
if params['solo']:
if not ((rr['type'] == params['type']) and (rr['name'] == search_record) and (rr['content'] == content)):
self.changed = True
if not self.module.check_mode:
result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(rr['zone_id'], rr['id']), 'DELETE')
else:
self.changed = True
if not self.module.check_mode:
result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(rr['zone_id'], rr['id']), 'DELETE')
return self.changed
def ensure_dns_record(self, **kwargs):
params = {}
for param in ['port', 'priority', 'proto', 'proxied', 'service', 'ttl', 'type', 'record', 'value', 'weight', 'zone',
'algorithm', 'cert_usage', 'hash_type', 'selector', 'key_tag']:
if param in kwargs:
params[param] = kwargs[param]
else:
params[param] = getattr(self, param)
search_value = params['value']
search_record = params['record']
new_record = None
if (params['type'] is None) or (params['record'] is None):
self.module.fail_json(msg="You must provide a type and a record to create a new record")
if (params['type'] in ['A', 'AAAA', 'CNAME', 'TXT', 'MX', 'NS', 'SPF']):
if not params['value']:
self.module.fail_json(msg="You must provide a non-empty value to create this record type")
# there can only be one CNAME per record
# ignoring the value when searching for existing
# CNAME records allows us to update the value if it
# changes
if params['type'] == 'CNAME':
search_value = None
new_record = {
"type": params['type'],
"name": params['record'],
"content": params['value'],
"ttl": params['ttl']
}
if (params['type'] in ['A', 'AAAA', 'CNAME']):
new_record["proxied"] = params["proxied"]
if params['type'] == 'MX':
for attr in [params['priority'], params['value']]:
if (attr is None) or (attr == ''):
self.module.fail_json(msg="You must provide priority and a value to create this record type")<|fim▁hole|> "content": params['value'],
"priority": params['priority'],
"ttl": params['ttl']
}
if params['type'] == 'SRV':
for attr in [params['port'], params['priority'], params['proto'], params['service'], params['weight'], params['value']]:
if (attr is None) or (attr == ''):
self.module.fail_json(msg="You must provide port, priority, proto, service, weight and a value to create this record type")
srv_data = {
"target": params['value'],
"port": params['port'],
"weight": params['weight'],
"priority": params['priority'],
"name": params['record'][:-len('.' + params['zone'])],
"proto": params['proto'],
"service": params['service']
}
new_record = {"type": params['type'], "ttl": params['ttl'], 'data': srv_data}
search_value = str(params['weight']) + '\t' + str(params['port']) + '\t' + params['value']
search_record = params['service'] + '.' + params['proto'] + '.' + params['record']
if params['type'] == 'DS':
for attr in [params['key_tag'], params['algorithm'], params['hash_type'], params['value']]:
if (attr is None) or (attr == ''):
self.module.fail_json(msg="You must provide key_tag, algorithm, hash_type and a value to create this record type")
ds_data = {
"key_tag": params['key_tag'],
"algorithm": params['algorithm'],
"digest_type": params['hash_type'],
"digest": params['value'],
}
new_record = {
"type": params['type'],
"name": params['record'],
'data': ds_data,
"ttl": params['ttl'],
}
search_value = str(params['key_tag']) + '\t' + str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
if params['type'] == 'SSHFP':
for attr in [params['algorithm'], params['hash_type'], params['value']]:
if (attr is None) or (attr == ''):
self.module.fail_json(msg="You must provide algorithm, hash_type and a value to create this record type")
sshfp_data = {
"fingerprint": params['value'],
"type": params['hash_type'],
"algorithm": params['algorithm'],
}
new_record = {
"type": params['type'],
"name": params['record'],
'data': sshfp_data,
"ttl": params['ttl'],
}
search_value = str(params['algorithm']) + '\t' + str(params['hash_type']) + '\t' + params['value']
if params['type'] == 'TLSA':
for attr in [params['port'], params['proto'], params['cert_usage'], params['selector'], params['hash_type'], params['value']]:
if (attr is None) or (attr == ''):
self.module.fail_json(msg="You must provide port, proto, cert_usage, selector, hash_type and a value to create this record type")
search_record = params['port'] + '.' + params['proto'] + '.' + params['record']
tlsa_data = {
"usage": params['cert_usage'],
"selector": params['selector'],
"matching_type": params['hash_type'],
"certificate": params['value'],
}
new_record = {
"type": params['type'],
"name": search_record,
'data': tlsa_data,
"ttl": params['ttl'],
}
search_value = str(params['cert_usage']) + '\t' + str(params['selector']) + '\t' + str(params['hash_type']) + '\t' + params['value']
zone_id = self._get_zone_id(params['zone'])
records = self.get_dns_records(params['zone'], params['type'], search_record, search_value)
# in theory this should be impossible as cloudflare does not allow
# the creation of duplicate records but lets cover it anyways
if len(records) > 1:
self.module.fail_json(msg="More than one record already exists for the given attributes. That should be impossible, please open an issue!")
# record already exists, check if it must be updated
if len(records) == 1:
cur_record = records[0]
do_update = False
if (params['ttl'] is not None) and (cur_record['ttl'] != params['ttl']):
do_update = True
if (params['priority'] is not None) and ('priority' in cur_record) and (cur_record['priority'] != params['priority']):
do_update = True
if ('proxied' in new_record) and ('proxied' in cur_record) and (cur_record['proxied'] != params['proxied']):
do_update = True
if ('data' in new_record) and ('data' in cur_record):
if (cur_record['data'] != new_record['data']):
do_update = True
if (params['type'] == 'CNAME') and (cur_record['content'] != new_record['content']):
do_update = True
if do_update:
if self.module.check_mode:
result = new_record
else:
result, info = self._cf_api_call('/zones/{0}/dns_records/{1}'.format(zone_id, records[0]['id']), 'PUT', new_record)
self.changed = True
return result, self.changed
else:
return records, self.changed
if self.module.check_mode:
result = new_record
else:
result, info = self._cf_api_call('/zones/{0}/dns_records'.format(zone_id), 'POST', new_record)
self.changed = True
return result, self.changed
def main():
module = AnsibleModule(
argument_spec=dict(
account_api_token=dict(required=True, no_log=True, type='str'),
account_email=dict(required=True, type='str'),
algorithm=dict(required=False, default=None, type='int'),
cert_usage=dict(required=False, default=None, choices=[0, 1, 2, 3], type='int'),
hash_type=dict(required=False, default=None, choices=[1, 2], type='int'),
key_tag=dict(required=False, default=None, type='int'),
port=dict(required=False, default=None, type='int'),
priority=dict(required=False, default=1, type='int'),
proto=dict(required=False, default=None, type='str'),
proxied=dict(required=False, default=False, type='bool'),
record=dict(required=False, default='@', aliases=['name'], type='str'),
selector=dict(required=False, default=None, choices=[0, 1], type='int'),
service=dict(required=False, default=None, type='str'),
solo=dict(required=False, default=None, type='bool'),
state=dict(required=False, default='present', choices=['present', 'absent'], type='str'),
timeout=dict(required=False, default=30, type='int'),
ttl=dict(required=False, default=1, type='int'),
type=dict(required=False, default=None, choices=['A', 'AAAA', 'CNAME', 'TXT', 'SRV', 'MX', 'NS', 'DS', 'SPF', 'SSHFP', 'TLSA'], type='str'),
value=dict(required=False, default=None, aliases=['content'], type='str'),
weight=dict(required=False, default=1, type='int'),
zone=dict(required=True, default=None, aliases=['domain'], type='str'),
),
supports_check_mode=True,
required_if=([
('state', 'present', ['record', 'type', 'value']),
('state', 'absent', ['record']),
('type', 'SRV', ['proto', 'service']),
('type', 'TLSA', ['proto', 'port']),
]
),
)
if module.params['type'] == 'SRV':
if not ((module.params['weight'] is not None and module.params['port'] is not None
and not (module.params['value'] is None or module.params['value'] == ''))
or (module.params['weight'] is None and module.params['port'] is None
and (module.params['value'] is None or module.params['value'] == ''))):
module.fail_json(msg="For SRV records the params weight, port and value all need to be defined, or not at all.")
if module.params['type'] == 'SSHFP':
if not ((module.params['algorithm'] is not None and module.params['hash_type'] is not None
and not (module.params['value'] is None or module.params['value'] == ''))
or (module.params['algorithm'] is None and module.params['hash_type'] is None
and (module.params['value'] is None or module.params['value'] == ''))):
module.fail_json(msg="For SSHFP records the params algorithm, hash_type and value all need to be defined, or not at all.")
if module.params['type'] == 'TLSA':
if not ((module.params['cert_usage'] is not None and module.params['selector'] is not None and module.params['hash_type'] is not None
and not (module.params['value'] is None or module.params['value'] == ''))
or (module.params['cert_usage'] is None and module.params['selector'] is None and module.params['hash_type'] is None
and (module.params['value'] is None or module.params['value'] == ''))):
module.fail_json(msg="For TLSA records the params cert_usage, selector, hash_type and value all need to be defined, or not at all.")
if module.params['type'] == 'DS':
if not ((module.params['key_tag'] is not None and module.params['algorithm'] is not None and module.params['hash_type'] is not None
and not (module.params['value'] is None or module.params['value'] == ''))
or (module.params['key_tag'] is None and module.params['algorithm'] is None and module.params['hash_type'] is None
and (module.params['value'] is None or module.params['value'] == ''))):
module.fail_json(msg="For DS records the params key_tag, algorithm, hash_type and value all need to be defined, or not at all.")
changed = False
cf_api = CloudflareAPI(module)
# sanity checks
if cf_api.is_solo and cf_api.state == 'absent':
module.fail_json(msg="solo=true can only be used with state=present")
# perform add, delete or update (only the TTL can be updated) of one or
# more records
if cf_api.state == 'present':
# delete all records matching record name + type
if cf_api.is_solo:
changed = cf_api.delete_dns_records(solo=cf_api.is_solo)
result, changed = cf_api.ensure_dns_record()
if isinstance(result, list):
module.exit_json(changed=changed, result={'record': result[0]})
else:
module.exit_json(changed=changed, result={'record': result})
else:
# force solo to False, just to be sure
changed = cf_api.delete_dns_records(solo=False)
module.exit_json(changed=changed)
if __name__ == '__main__':
main()<|fim▁end|>
|
new_record = {
"type": params['type'],
"name": params['record'],
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from .models import Batch, Event, PreClaim, Claim, Department, Student
from django.contrib.admin.models import LogEntry
# Register your models here.
def js_approve(modeladmin, request, queryset):
queryset.update(js_approved = True)
js_approve.short_description = 'Joint Sec Approves'
def dean_approve(modeladmin,request, queryset):
queryset.update(dean_approved = True)
dean_approve.short_description = 'Dean Approves'
def sis_approve(modeladmin,request, queryset):
queryset.update(sis_approved = True)
sis_approve.short_description = 'SIS Approves'
@admin.register(PreClaim)
class PreClaimAdmin(admin.ModelAdmin):
exclude = ('students','dean_approved')
list_display = ('event','dean_approved')
actions = [dean_approve,]
def get_actions(self,request):
actions = super(PreClaimAdmin, self).get_actions(request)<|fim▁hole|> if not request.user.has_perm('attendance.preclaim_dean_approve'):
del actions['dean_approve']
return actions
@admin.register(Claim)
class ClaimAdmin(admin.ModelAdmin):
exclude = ('sis_approved',)
list_display = ('student','name','period','date','event','pre_claim_approved','js_approved','sis_approved')
actions = [js_approve,sis_approve]
search_fields = ['student__name','student__roll_no','period__department__name']
def get_actions(self,request):
actions = super(ClaimAdmin, self).get_actions(request)
if not request.user.has_perm('attendance.claim_js_approve'):
del actions['js_approve']
if not request.user.has_perm('attendance.claim_sis_approve'):
del actions['sis_approve']
return actions
admin.site.register(Batch)
admin.site.register(Event)
admin.site.register(Department)
#admin.site.register(Student)
admin.site.register(LogEntry)
admin.site.site_header = "KMC Office"
admin.site.title = "KMC Office"
admin.site.index_title = ""<|fim▁end|>
| |
<|file_name|>customisation.py<|end_file_name|><|fim▁begin|>import os
from django import template
from django.conf import settings
from django.utils.safestring import mark_safe
register = template.Library()
<|fim▁hole|> settings.MEDIA_ROOT,
"overrides.css"
)
if os.path.exists(theme_path):
return mark_safe(
'<link rel="stylesheet" type="text/css" href="{}" />'.format(
os.path.join(settings.MEDIA_URL, "overrides.css")
)
)
return ""
@register.simple_tag()
def custom_js():
theme_path = os.path.join(
settings.MEDIA_ROOT,
"overrides.js"
)
if os.path.exists(theme_path):
return mark_safe(
'<script src="{}"></script>'.format(
os.path.join(settings.MEDIA_URL, "overrides.js")
)
)
return ""<|fim▁end|>
|
@register.simple_tag()
def custom_css():
theme_path = os.path.join(
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
from . import types_of_service
class as_external_lsa(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/as-external-lsa. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Contents of the AS External LSA
"""
__slots__ = ("_path_helper", "_extmethods", "__state", "__types_of_service")
_yang_name = "as-external-lsa"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__types_of_service = YANGDynClass(
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"as-external-lsa",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/state (container)
YANG Description: State parameters for the AS external LSA
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters for the AS external LSA
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_types_of_service(self):
"""
Getter method for types_of_service, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/types_of_service (container)
YANG Description: Breakdown of External LSA contents specifying multiple
TOS values
"""
return self.__types_of_service
def _set_types_of_service(self, v, load=False):
"""
Setter method for types_of_service, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/types_of_service (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_types_of_service is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_types_of_service() directly.
YANG Description: Breakdown of External LSA contents specifying multiple
TOS values
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """types_of_service must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=types_of_service.types_of_service, is_container='container', yang_name="types-of-service", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__types_of_service = t
if hasattr(self, "_set"):
self._set()
def _unset_types_of_service(self):
self.__types_of_service = YANGDynClass(
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,<|fim▁hole|> types_of_service = __builtin__.property(_get_types_of_service)
_pyangbind_elements = OrderedDict(
[("state", state), ("types_of_service", types_of_service)]
)
from . import state
from . import types_of_service
class as_external_lsa(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa-types/lsa-type/lsas/lsa/as-external-lsa. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Contents of the AS External LSA
"""
__slots__ = ("_path_helper", "_extmethods", "__state", "__types_of_service")
_yang_name = "as-external-lsa"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__types_of_service = YANGDynClass(
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"ospfv2",
"areas",
"area",
"lsdb",
"lsa-types",
"lsa-type",
"lsas",
"lsa",
"as-external-lsa",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/state (container)
YANG Description: State parameters for the AS external LSA
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters for the AS external LSA
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_types_of_service(self):
"""
Getter method for types_of_service, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/types_of_service (container)
YANG Description: Breakdown of External LSA contents specifying multiple
TOS values
"""
return self.__types_of_service
def _set_types_of_service(self, v, load=False):
"""
Setter method for types_of_service, mapped from YANG variable /network_instances/network_instance/protocols/protocol/ospfv2/areas/area/lsdb/lsa_types/lsa_type/lsas/lsa/as_external_lsa/types_of_service (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_types_of_service is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_types_of_service() directly.
YANG Description: Breakdown of External LSA contents specifying multiple
TOS values
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """types_of_service must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=types_of_service.types_of_service, is_container='container', yang_name="types-of-service", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__types_of_service = t
if hasattr(self, "_set"):
self._set()
def _unset_types_of_service(self):
self.__types_of_service = YANGDynClass(
base=types_of_service.types_of_service,
is_container="container",
yang_name="types-of-service",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
types_of_service = __builtin__.property(_get_types_of_service)
_pyangbind_elements = OrderedDict(
[("state", state), ("types_of_service", types_of_service)]
)<|fim▁end|>
|
)
state = __builtin__.property(_get_state)
|
<|file_name|>git.py<|end_file_name|><|fim▁begin|>import subprocess
import re
import os
from app import util
BLAME_NAME_REX = re.compile(r'\(([\w\s]+)\d{4}')
def git_path(path):
"""Returns the top-level git path."""
dir_ = path
if os.path.isfile(path):
dir_ = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'rev-parse', '--show-toplevel'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=dir_
)
out = proc.communicate()[0]
if out:
return out.strip()
return None
def git_name():
return subprocess.check_output(["git", "config", "user.name"]).strip()
def git_branch(path):
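    # Return the currently checked-out branch name for the repo containing `path`,
    # or None if git reports an error.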
working_dir = path
if not os.path.isdir(path):
working_dir = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir
)
out, err = proc.communicate()
if err:
return None
return out.strip()
def git_branch_files(path):
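    # Absolute paths of files with uncommitted changes plus, when not on master,
    # files changed on this branch relative to master.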
path = util.path_dir(path)
if not path:
raise Exception("Bad path: {}".format(path))
top_dir = git_path(path)
proc = subprocess.Popen(
["git", "diff", "--name-only"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=top_dir
)
out = proc.communicate()[0]
all_files = set(out.splitlines())
branch = git_branch(path)
if branch != 'master':
proc = subprocess.Popen(
["git", "diff", "--name-only", "master..HEAD"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=path
)
out = proc.communicate()[0]<|fim▁hole|> return [os.path.join(top_dir, i) for i in all_files if i]
def blame(path):
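    # Map each zero-based line number of the file to the author name parsed from
    # `git blame` output, or None when no author could be extracted.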
working_dir = os.path.split(path)[0]
proc = subprocess.Popen(
['git', 'blame', path],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=working_dir
)
out, err = proc.communicate()
blame_lines = (out + err).splitlines()
result = {}
for i, line in enumerate(blame_lines):
match = BLAME_NAME_REX.search(line)
if match:
result[i] = match.group(1).strip()
else:
result[i] = None
return result<|fim▁end|>
|
all_files.update(out.splitlines())
|
<|file_name|>indexer3.ts<|end_file_name|><|fim▁begin|>var dateMap: { [x: string]: Date; } = {}
<|fim▁hole|>var r: Date = dateMap["hello"] // result type includes indexer using BCT<|fim▁end|>
| |
<|file_name|>sysccapi.go<|end_file_name|><|fim▁begin|>/*
Copyright IBM Corp. All Rights Reserved.
SPDX-License-Identifier: Apache-2.0
*/
package scc
import (
"errors"
"fmt"
"github.com/hyperledger/fabric/common/flogging"
"github.com/hyperledger/fabric/common/util"
"github.com/hyperledger/fabric/core/chaincode/shim"
"github.com/hyperledger/fabric/core/common/ccprovider"
"github.com/hyperledger/fabric/core/container/ccintf"
"github.com/hyperledger/fabric/core/container/inproccontroller"
"github.com/hyperledger/fabric/core/peer"
pb "github.com/hyperledger/fabric/protos/peer"
"github.com/spf13/viper"
)
var sysccLogger = flogging.MustGetLogger("sccapi")
// Registrar provides a way for system chaincodes to be registered
type Registrar interface {
// Register registers a system chaincode
Register(ccid *ccintf.CCID, cc shim.Chaincode) error
}
// SystemChaincode defines the metadata needed to initialize system chaincode
// when the fabric comes up. SystemChaincodes are installed by adding an
// entry in importsysccs.go
type SystemChaincode struct {
//Unique name of the system chaincode<|fim▁hole|>
//Path to the system chaincode; currently not used
Path string
//InitArgs initialization arguments to startup the system chaincode
InitArgs [][]byte
// Chaincode holds the actual chaincode instance
Chaincode shim.Chaincode
// InvokableExternal keeps track of whether
// this system chaincode can be invoked
// through a proposal sent to this peer
InvokableExternal bool
// InvokableCC2CC keeps track of whether
// this system chaincode can be invoked
// by way of a chaincode-to-chaincode
// invocation
InvokableCC2CC bool
// Enabled a convenient switch to enable/disable system chaincode without
// having to remove entry from importsysccs.go
Enabled bool
}
type SysCCWrapper struct {
SCC *SystemChaincode
}
func (sccw *SysCCWrapper) Name() string { return sccw.SCC.Name }
func (sccw *SysCCWrapper) Path() string { return sccw.SCC.Path }
func (sccw *SysCCWrapper) InitArgs() [][]byte { return sccw.SCC.InitArgs }
func (sccw *SysCCWrapper) Chaincode() shim.Chaincode { return sccw.SCC.Chaincode }
func (sccw *SysCCWrapper) InvokableExternal() bool { return sccw.SCC.InvokableExternal }
func (sccw *SysCCWrapper) InvokableCC2CC() bool { return sccw.SCC.InvokableCC2CC }
func (sccw *SysCCWrapper) Enabled() bool { return sccw.SCC.Enabled }
type SelfDescribingSysCC interface {
//Unique name of the system chaincode
Name() string
//Path to the system chaincode; currently not used
Path() string
//InitArgs initialization arguments to startup the system chaincode
InitArgs() [][]byte
// Chaincode returns the underlying chaincode
Chaincode() shim.Chaincode
// InvokableExternal keeps track of whether
// this system chaincode can be invoked
// through a proposal sent to this peer
InvokableExternal() bool
// InvokableCC2CC keeps track of whether
// this system chaincode can be invoked
// by way of a chaincode-to-chaincode
// invocation
InvokableCC2CC() bool
// Enabled a convenient switch to enable/disable system chaincode without
// having to remove entry from importsysccs.go
Enabled() bool
}
// registerSysCC registers the given system chaincode with the peer
func (p *Provider) registerSysCC(syscc SelfDescribingSysCC) (bool, error) {
if !syscc.Enabled() || !isWhitelisted(syscc) {
sysccLogger.Info(fmt.Sprintf("system chaincode (%s,%s,%t) disabled", syscc.Name(), syscc.Path(), syscc.Enabled()))
return false, nil
}
// XXX This is an ugly hack, version should be tied to the chaincode instance, not he peer binary
version := util.GetSysCCVersion()
ccid := &ccintf.CCID{
Name: syscc.Name(),
Version: version,
}
err := p.Registrar.Register(ccid, syscc.Chaincode())
if err != nil {
//if the type is registered, the instance may not be... keep going
if _, ok := err.(inproccontroller.SysCCRegisteredErr); !ok {
errStr := fmt.Sprintf("could not register (%s,%v): %s", syscc.Path(), syscc, err)
sysccLogger.Error(errStr)
return false, fmt.Errorf(errStr)
}
}
sysccLogger.Infof("system chaincode %s(%s) registered", syscc.Name(), syscc.Path())
return true, err
}
// deploySysCC deploys the given system chaincode on a chain
func deploySysCC(chainID string, ccprov ccprovider.ChaincodeProvider, syscc SelfDescribingSysCC) error {
if !syscc.Enabled() || !isWhitelisted(syscc) {
sysccLogger.Info(fmt.Sprintf("system chaincode (%s,%s) disabled", syscc.Name(), syscc.Path()))
return nil
}
txid := util.GenerateUUID()
// Note, this structure is barely initialized,
// we omit the history query executor, the proposal
// and the signed proposal
txParams := &ccprovider.TransactionParams{
TxID: txid,
ChannelID: chainID,
}
if chainID != "" {
lgr := peer.GetLedger(chainID)
if lgr == nil {
panic(fmt.Sprintf("syschain %s start up failure - unexpected nil ledger for channel %s", syscc.Name(), chainID))
}
txsim, err := lgr.NewTxSimulator(txid)
if err != nil {
return err
}
txParams.TXSimulator = txsim
defer txsim.Done()
}
chaincodeID := &pb.ChaincodeID{Path: syscc.Path(), Name: syscc.Name()}
spec := &pb.ChaincodeSpec{Type: pb.ChaincodeSpec_Type(pb.ChaincodeSpec_Type_value["GOLANG"]), ChaincodeId: chaincodeID, Input: &pb.ChaincodeInput{Args: syscc.InitArgs()}}
chaincodeDeploymentSpec := &pb.ChaincodeDeploymentSpec{ExecEnv: pb.ChaincodeDeploymentSpec_SYSTEM, ChaincodeSpec: spec}
// XXX This is an ugly hack, version should be tied to the chaincode instance, not he peer binary
version := util.GetSysCCVersion()
cccid := &ccprovider.CCContext{
Name: chaincodeDeploymentSpec.ChaincodeSpec.ChaincodeId.Name,
Version: version,
}
resp, _, err := ccprov.ExecuteLegacyInit(txParams, cccid, chaincodeDeploymentSpec)
if err == nil && resp.Status != shim.OK {
err = errors.New(resp.Message)
}
sysccLogger.Infof("system chaincode %s/%s(%s) deployed", syscc.Name(), chainID, syscc.Path())
return err
}
// deDeploySysCC stops the system chaincode and deregisters it from inproccontroller
func deDeploySysCC(chainID string, ccprov ccprovider.ChaincodeProvider, syscc SelfDescribingSysCC) error {
// XXX This is an ugly hack, version should be tied to the chaincode instance, not he peer binary
version := util.GetSysCCVersion()
ccci := &ccprovider.ChaincodeContainerInfo{
Type: "GOLANG",
Name: syscc.Name(),
Path: syscc.Path(),
Version: version,
ContainerType: inproccontroller.ContainerType,
}
err := ccprov.Stop(ccci)
return err
}
func isWhitelisted(syscc SelfDescribingSysCC) bool {
chaincodes := viper.GetStringMapString("chaincode.system")
val, ok := chaincodes[syscc.Name()]
enabled := val == "enable" || val == "true" || val == "yes"
return ok && enabled
}<|fim▁end|>
|
Name string
|
<|file_name|>discreteBarChart.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Examples for python-nvd3, a Python wrapper for the NVD3 graph library.
NVD3 is an attempt to build re-usable charts and chart components
for d3.js without taking away the power that d3.js gives you.
Project location : https://github.com/areski/python-nvd3
"""
from nvd3 import discreteBarChart
#Open File for test
output_file = open('test_discreteBarChart.html', 'w')
type = "discreteBarChart"
chart = discreteBarChart(name='mygraphname', height=400, width=600)
chart.set_containerheader("\n\n<h2>" + type + "</h2>\n\n")
xdata = ["A", "B", "C", "D", "E", "F", "G"]
ydata = [3, 12, -10, 5, 25, -7, 2]
extra_serie = {"tooltip": {"y_start": "", "y_end": " cal"}}
chart.add_serie(y=ydata, x=xdata, extra=extra_serie)<|fim▁hole|>#---------------------------------------
#close Html file
output_file.close()<|fim▁end|>
|
chart.buildhtml()
output_file.write(chart.htmlcontent)
|
<|file_name|>Menu.java<|end_file_name|><|fim▁begin|>package ppp.menu;
import java.awt.image.BufferedImage;
public abstract interface Menu {
public abstract void up();<|fim▁hole|>}<|fim▁end|>
|
public abstract void down();
public abstract void enter();
public abstract void escape();
public abstract BufferedImage getImage();
|
<|file_name|>test_plugins.py<|end_file_name|><|fim▁begin|>import types
import unittest
from collections import namedtuple
import os
import sys
import tempfile
from zipfile import ZipFile, ZipInfo
from utils import jar_utils
sys.path.append('tests/unit/')
import mock
from plugins.systems.config_container_crawler import ConfigContainerCrawler
from plugins.systems.config_host_crawler import ConfigHostCrawler
from plugins.systems.connection_container_crawler import ConnectionContainerCrawler
from plugins.systems.connection_host_crawler import ConnectionHostCrawler
from plugins.systems.connection_vm_crawler import ConnectionVmCrawler
from plugins.systems.cpu_container_crawler import CpuContainerCrawler
from plugins.systems.cpu_host_crawler import CpuHostCrawler
from plugins.systems.disk_container_crawler import DiskContainerCrawler
from plugins.systems.disk_host_crawler import DiskHostCrawler
from plugins.systems.dockerhistory_container_crawler import DockerhistoryContainerCrawler
from plugins.systems.dockerinspect_container_crawler import DockerinspectContainerCrawler
from plugins.systems.dockerps_host_crawler import DockerpsHostCrawler
from plugins.systems.file_container_crawler import FileContainerCrawler
from plugins.systems.file_host_crawler import FileHostCrawler
from plugins.systems.interface_container_crawler import InterfaceContainerCrawler
from plugins.systems.interface_host_crawler import InterfaceHostCrawler
from plugins.systems.interface_vm_crawler import InterfaceVmCrawler
from plugins.systems.jar_container_crawler import JarContainerCrawler
from plugins.systems.jar_host_crawler import JarHostCrawler
from plugins.systems.load_container_crawler import LoadContainerCrawler
from plugins.systems.load_host_crawler import LoadHostCrawler
from plugins.systems.memory_container_crawler import MemoryContainerCrawler
from plugins.systems.memory_host_crawler import MemoryHostCrawler
from plugins.systems.memory_vm_crawler import MemoryVmCrawler
from plugins.systems.metric_container_crawler import MetricContainerCrawler
from plugins.systems.metric_host_crawler import MetricHostCrawler
from plugins.systems.metric_vm_crawler import MetricVmCrawler
from plugins.systems.os_container_crawler import OSContainerCrawler
from plugins.systems.os_host_crawler import OSHostCrawler
from plugins.systems.os_vm_crawler import os_vm_crawler
from plugins.systems.package_container_crawler import PackageContainerCrawler
from plugins.systems.package_host_crawler import PackageHostCrawler
from plugins.systems.process_container_crawler import ProcessContainerCrawler
from plugins.systems.process_host_crawler import ProcessHostCrawler
from plugins.systems.process_vm_crawler import process_vm_crawler
from container import Container
from utils.crawler_exceptions import CrawlError
from utils.features import (
OSFeature,
ConfigFeature,
DiskFeature,
PackageFeature,
MemoryFeature,
CpuFeature,
InterfaceFeature,
LoadFeature,
DockerPSFeature,
JarFeature)
# for OUTVM psvmi
class DummyContainer(Container):
def __init__(self, long_id):
self.pid = '1234'
self.long_id = long_id
def get_memory_cgroup_path(self, node):
return '/cgroup/%s' % node
def get_cpu_cgroup_path(self, node):
return '/cgroup/%s' % node
# for OUTVM psvmi
psvmi_sysinfo = namedtuple('psvmi_sysinfo',
'''boottime ipaddr osdistro osname osplatform osrelease
ostype osversion memory_used memory_buffered
memory_cached memory_free''')
psvmi_memory = namedtuple(
'psvmi_memory',
'memory_used memory_buffered memory_cached memory_free')
psvmi_interface = namedtuple(
'psvmi_interface',
'ifname bytes_sent bytes_recv packets_sent packets_recv errout errin')
os_stat = namedtuple(
'os_stat',
'''st_mode st_gid st_uid st_atime st_ctime st_mtime st_size''')
def mocked_os_walk(root_dir):
files = ['file1', 'file2', 'file3']
dirs = ['dir']
yield ('/', dirs, files)
# simulate the os_walk behavior (if a dir is deleted, we don't walk it)
if '/dir' in dirs:
files = ['file4']
dirs = []
yield ('/dir', dirs, files)
def mocked_os_walk_for_avoidsetns(root_dir):
files = ['file1', 'file2', 'file3']
dirs = ['dir']
yield ('/1/2/3', dirs, files)
# simulate the os_walk behavior (if a dir is deleted, we don't walk it)
if '/1/2/3/dir' in dirs:
files = ['file4']
dirs = []
yield ('/dir', dirs, files)
# XXX can't do self.count = for some reason
mcount = 0
class MockedMemCgroupFile(mock.Mock):
def __init__(self):
pass
def readline(self):
return '2'
def __iter__(self):
return self
def next(self):
global mcount
mcount += 1
if mcount == 1:
return 'total_cache 100'
if mcount == 2:
return 'total_active_file 200'
else:
raise StopIteration()
# XXX can't do self.count = for some reason
ccount = 0
ccount2 = 0
class MockedCpuCgroupFile(mock.Mock):
def __init__(self):
pass
def readline(self):
global ccount2
ccount2 += 1
if ccount2 == 1:
return '1e7'
else:
return '2e7'
def __iter__(self):
return self
def next(self):
global ccount
ccount += 1
if ccount == 1:
return 'system 20'
if ccount == 2:
return 'user 20'
else:
raise StopIteration()
class MockedFile(mock.Mock):
def __init__(self):
pass
def read(self):
return 'content'
def mocked_codecs_open(filename, mode, encoding, errors):
m = mock.Mock()
m.__enter__ = mock.Mock(return_value=MockedFile())
m.__exit__ = mock.Mock(return_value=False)
return m
def mocked_cpu_cgroup_open(filename, mode):
m = mock.Mock()
m.__enter__ = mock.Mock(return_value=MockedCpuCgroupFile())
m.__exit__ = mock.Mock(return_value=False)
print filename
return m
def mocked_memory_cgroup_open(filename, mode):
m = mock.Mock()
m.__enter__ = mock.Mock(return_value=MockedMemCgroupFile())
m.__exit__ = mock.Mock(return_value=False)
print filename
return m
partition = namedtuple('partition', 'device fstype mountpoint opts')
pdiskusage = namedtuple('pdiskusage', 'percent total')
meminfo = namedtuple('meminfo', 'rss vms')
ioinfo = namedtuple('ioinfo', 'read_bytes write_bytes')
psutils_memory = namedtuple('psutils_memory', 'used free buffers cached')
psutils_cpu = namedtuple(
'psutils_cpu',
'idle nice user iowait system irq steal')
psutils_net = namedtuple(
'psutils_net',
'bytes_sent bytes_recv packets_sent packets_recv errout errin')
def mocked_disk_partitions(all):
return [partition('/dev/a', 'type', '/a', 'opts'),
partition('/dev/b', 'type', '/b', 'opts')]
class Connection():
def __init__(self):
self.laddr = ['1.1.1.1', '22']
self.raddr = ['2.2.2.2', '22']
self.status = 'Established'
class Process():
def __init__(self, name):
self.name = name
self.cmdline = ['cmd']
self.pid = 123
self.status = 'Running'
self.cwd = '/bin'
self.ppid = 1
self.create_time = 1000
def num_threads(self):
return 1
def username(self):
return 'don quijote'
def get_open_files(self):
return []
def get_connections(self):
return [Connection()]
def get_memory_info(self):
return meminfo(10, 20)
def get_io_counters(self):
return ioinfo(10, 20)
def get_cpu_percent(self, interval):
return 30
def get_memory_percent(self):
return 30
STAT_DIR_MODE = 16749
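# 16749 == 0o40555, i.e. S_IFDIR | 0o555: the st_mode value the mocks below report for directories.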
def mocked_os_lstat(path):
print path
if path == '/':
return os_stat(STAT_DIR_MODE, 2, 3, 4, 5, 6, 7)
elif path == '/file1':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/file2':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/file3':
return os_stat(1, 2, 3, 4, 5, 6, 7)
elif path == '/dir':
return os_stat(STAT_DIR_MODE, 2, 3, 4, 5, 6, 7)
else:
return os_stat(1, 2, 3, 4, 5, 6, 7)
def mocked_run_as_another_namespace(pid, ns, function, *args, **kwargs):
result = function(*args)
# if res is a generator (i.e. function uses yield)
if isinstance(result, types.GeneratorType):
result = list(result)
return result
def throw_os_error(*args, **kvargs):
raise OSError()
class PluginTests(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self, *args):
pass
@mock.patch('utils.os_utils.time.time',
side_effect=lambda: 1001)
@mock.patch('utils.os_utils.platform.platform',
side_effect=lambda: 'platform')
@mock.patch('utils.os_utils.utils.misc.get_host_ip4_addresses',
side_effect=lambda: ['1.1.1.1'])
@mock.patch('utils.os_utils.psutil.boot_time',
side_effect=lambda: 1000)
@mock.patch('utils.os_utils.platform.system',
side_effect=lambda: 'linux')
@mock.patch('utils.os_utils.platform.machine',
side_effect=lambda: 'machine')
@mock.patch(
'utils.os_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'os',
'version': 'os_version'})
def test_os_host_cawler_plugin(self, *args):
fc = OSHostCrawler()
for os in fc.crawl():
print os
assert os == (
'linux',
OSFeature(
boottime=1000,
uptime=1,
ipaddr=['1.1.1.1'],
os='os',
os_version='os_version',
os_kernel='platform',
architecture='machine'),
'os')
for i, arg in enumerate(args):
if i > 0: # time.time is called more than once
continue
assert arg.call_count == 1
@mock.patch('utils.os_utils.platform.system',
side_effect=throw_os_error)
def test_os_host_crawler_plugin_failure(self, *args):
fc = OSHostCrawler()
with self.assertRaises(OSError):
for os in fc.crawl():
pass
@mock.patch(
'utils.os_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'os',
'version': 'os_version'})
def test_os_host_crawler_plugin_mountpoint_mode(self, *args):
fc = OSHostCrawler()
for os in fc.crawl(root_dir='/a'):
print os
assert os == (
'linux',
OSFeature(
boottime='unsupported',
uptime='unsupported',
ipaddr='0.0.0.0',
os='os',
os_version='os_version',
os_kernel='unknown',
architecture='unknown'),
'os')
for i, arg in enumerate(args):
assert arg.call_count == 1
@mock.patch('utils.os_utils.osinfo.get_osinfo',
side_effect=throw_os_error)
def test_os_host_crawler_plugin_mountpoint_mode_failure(self, *args):
fc = OSHostCrawler()
with self.assertRaises(OSError):
for os in fc.crawl(root_dir='/a'):
pass
@mock.patch('utils.os_utils.time.time',
side_effect=lambda: 1001)
@mock.patch('utils.os_utils.platform.platform',
side_effect=lambda: 'platform')
@mock.patch('utils.os_utils.utils.misc.get_host_ip4_addresses',
side_effect=lambda: ['1.1.1.1'])
@mock.patch('utils.os_utils.psutil.boot_time',
side_effect=lambda: 1000)
@mock.patch('utils.os_utils.platform.system',
side_effect=lambda: 'linux')
@mock.patch('utils.os_utils.platform.machine',
side_effect=lambda: 'machine')
@mock.patch(
("plugins.systems.os_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.os_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'utils.os_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'os',
'version': 'os_version'})
def test_os_container_crawler_plugin(self, *args):
fc = OSContainerCrawler()
for os in fc.crawl(container_id=123):
print os
assert os == (
'linux',
OSFeature(
boottime=1000,
uptime=1,
ipaddr=['1.1.1.1'],
os='os',
os_version='os_version',
os_kernel='platform',
architecture='machine'),
'os')
for i, arg in enumerate(args):
if i > 0: # time.time is called more than once
continue
assert arg.call_count == 1
@mock.patch(
("plugins.systems.os_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.os_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/a/b/c')
@mock.patch(
'utils.os_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'os',
'version': 'os_version'})
def test_os_container_crawler_plugin_avoidsetns(self, *args):
fc = OSContainerCrawler()
for os in fc.crawl(container_id=123, avoid_setns=True):
print os
assert os == (
'linux',
OSFeature(
boottime='unsupported',
uptime='unsupported',
ipaddr='0.0.0.0',
os='os',
os_version='os_version',
os_kernel='unknown',
architecture='unknown'),
'os')
for i, arg in enumerate(args):
print i, arg
if i == 0:
# get_osinfo()
assert arg.call_count == 1
arg.assert_called_with(mount_point='/a/b/c')
elif i == 1:
# get_docker_container_rootfs_path
assert arg.call_count == 1
arg.assert_called_with(123)
else:
# exec_dockerinspect
assert arg.call_count == 1
arg.assert_called_with(123)
@mock.patch(
("plugins.systems.os_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.os_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=throw_os_error)
def test_os_container_crawler_plugin_avoidsetns_failure(self, *args):
fc = OSContainerCrawler()
with self.assertRaises(OSError):
for os in fc.crawl(container_id=123, avoid_setns=True):
pass
@mock.patch('plugins.systems.os_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.os_vm_crawler.psvmi.system_info',
side_effect=lambda vmc: psvmi_sysinfo(1000,
'1.1.1.1',
'osdistro',
'osname',
'osplatform',
'osrelease',
'ostype',
'osversion',
1000000,
100000,
100000,
100000))
@mock.patch('plugins.systems.os_vm_crawler.psvmi')
def test_os_vm_crawler_plugin_without_vm(self, *args):
fc = os_vm_crawler()
for os in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert os == (
'ostype',
OSFeature(
boottime=1000,
uptime='unknown',
ipaddr='1.1.1.1',
os='ostype',
os_version='osversion',
os_kernel='osrelease',
architecture='osplatform'),
'os')
pass
assert args[1].call_count == 1
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_host_crawler(self, *args):
fc = FileHostCrawler()
for (k, f, fname) in fc.crawl():
print f
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3',
'/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 6
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/')
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_host_crawler_with_exclude_dirs(self, *args):
fc = FileHostCrawler()
for (k, f, fname) in fc.crawl(exclude_dirs=['dir']):
print f
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3']
assert f.path not in ['/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 4
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/')
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=throw_os_error)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_host_crawler_failure(self, *args):
fc = FileHostCrawler()
with self.assertRaises(OSError):
for (k, f, fname) in fc.crawl(root_dir='/a/b/c'):
pass
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.file_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_container_crawler(self, *args):
fc = FileContainerCrawler()
for (k, f, fname) in fc.crawl(root_dir='/'):
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3',
'/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 6
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/')
@mock.patch(
("plugins.systems.jar_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.jar_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
def test_jar_container_crawler_plugin(self, *args):
tmpdir = tempfile.mkdtemp()
jar_file_name = 'myfile.jar'
# Ensure the file is read/write by the creator only
saved_umask = os.umask(0077)
path = os.path.join(tmpdir, jar_file_name)
try:
with ZipFile(path, "w") as myjar:
myjar.writestr(ZipInfo('first.class',(1980,1,1,1,1,1)), "first secrets!")
myjar.writestr(ZipInfo('second.class',(1980,1,1,1,1,1)), "second secrets!")
myjar.writestr(ZipInfo('second.txt',(1980,1,1,1,1,1)), "second secrets!")
fc = JarContainerCrawler()
jars = list(fc.crawl(root_dir=tmpdir))
#jars = list(jar_utils.crawl_jar_files(root_dir=tmpdir))
print jars
jar_feature = jars[0][1]
assert 'myfile.jar' == jar_feature.name
assert '48ac85a26ffa7ff5cefdd5c73a9fb888' == jar_feature.jarhash
assert ['ddc6eff37020aa858e26b1ba8a49ee0e',
'cbe2a13eb99c1c8ac5f30d0a04f8c492'] == jar_feature.hashes
assert 'jar' == jars[0][2]
except IOError as e:
print 'IOError'
finally:
os.remove(path)
@mock.patch(
("plugins.systems.jar_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.jar_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/tmp')
def test_jar_container_crawler_avoidsetns(self, *args):
tmpdir = tempfile.mkdtemp()
jar_file_name = 'myfile.jar'
# Ensure the file is read/write by the creator only
saved_umask = os.umask(0077)
path = os.path.join(tmpdir, jar_file_name)
try:
with ZipFile(path, "w") as myjar:
myjar.writestr(ZipInfo('first.class',(1980,1,1,1,1,1)), "first secrets!")
myjar.writestr(ZipInfo('second.class',(1980,1,1,1,1,1)), "second secrets!")
myjar.writestr(ZipInfo('second.txt',(1980,1,1,1,1,1)), "second secrets!")
fc = JarContainerCrawler()
jars = list(fc.crawl(root_dir=os.path.basename(tmpdir), avoid_setns=True))
print jars
jar_feature = jars[0][1]
assert 'myfile.jar' == jar_feature.name
assert '48ac85a26ffa7ff5cefdd5c73a9fb888' == jar_feature.jarhash
assert ['ddc6eff37020aa858e26b1ba8a49ee0e',
'cbe2a13eb99c1c8ac5f30d0a04f8c492'] == jar_feature.hashes
assert 'jar' == jars[0][2]
except IOError as e:
print 'IOError'
finally:
os.remove(path)
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch('utils.file_utils.os.walk',
side_effect=throw_os_error)
@mock.patch(
("plugins.systems.file_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_container_crawler_failure(self, *args):
fc = FileContainerCrawler()
with self.assertRaises(OSError):
for (k, f, fname) in fc.crawl(root_dir='/a/b/c'):
pass
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.file_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk_for_avoidsetns)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_container_crawler_avoidsetns(self, *args):
fc = FileContainerCrawler()
for (k, f, fname) in fc.crawl(root_dir='/', avoid_setns=True):
print f
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'dir', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3',
'/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 6
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/1/2/3')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/1/2/3')
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.file_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)<|fim▁hole|> side_effect=mocked_os_lstat)
def test_file_container_crawler_with_exclude_dirs(self, *args):
fc = FileContainerCrawler()
for (k, f, fname) in fc.crawl(root_dir='/',
exclude_dirs=['dir']):
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3']
assert f.path not in ['/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 4
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/')
@mock.patch(
("plugins.systems.file_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.file_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.file_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk_for_avoidsetns)
@mock.patch('utils.file_utils.os.lstat',
side_effect=mocked_os_lstat)
def test_file_container_crawler_avoidsetns_with_exclude_dirs(
self,
*
args):
fc = FileContainerCrawler()
for (k, f, fname) in fc.crawl(root_dir='/',
avoid_setns=True,
exclude_dirs=['/dir']):
assert fname == "file"
assert f.mode in [1, STAT_DIR_MODE] and f.gid == 2 and f.uid == 3
assert f.atime == 4 and f.ctime == 5
assert f.mtime == 6 and f.size == 7
assert f.name in ['', 'file1', 'file2', 'file3', 'file4']
assert f.path in ['/', '/file1', '/file2', '/file3']
assert f.path not in ['/dir', '/dir/file4']
assert f.type in ['file', 'dir']
assert f.linksto is None
assert args[0].call_count == 4
assert args[1].call_count == 1 # oswalk
args[1].assert_called_with('/1/2/3')
assert args[2].call_count == 2 # isdir
args[2].assert_called_with('/1/2/3')
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_host_crawler(self, *args):
fc = ConfigHostCrawler()
for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=False):
assert fname == "config"
assert f == ConfigFeature(name='file1', content='content',
path='/etc/file1')
assert args[0].call_count == 1 # lstat
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.walk',
side_effect=lambda p: [
('/', [], ['file1', 'file2', 'file3.conf'])])
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.isfile',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.getsize',
side_effect=lambda p: 1000)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_host_crawler_with_discover(self, *args):
fc = ConfigHostCrawler()
configs = fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=True)
print configs
assert set(configs) == set([('/file3.conf',
ConfigFeature(name='file3.conf',
content='content',
path='/file3.conf'),
'config'),
('/etc/file1',
ConfigFeature(name='file1',
content='content',
path='/etc/file1'),
'config')])
@mock.patch(
("plugins.systems.config_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.config_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_container_crawler(self, *args):
fc = ConfigContainerCrawler()
for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=False):
assert fname == "config"
assert f == ConfigFeature(name='file1', content='content',
path='/etc/file1')
assert args[0].call_count == 1 # codecs open
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch(
("plugins.systems.config_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.config_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.walk',
side_effect=lambda p: [
('/', [], ['file1', 'file2', 'file3.conf'])])
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.isfile',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.getsize',
side_effect=lambda p: 1000)
def test_config_container_crawler_discover(self, *args):
fc = ConfigContainerCrawler()
configs = fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=True)
assert set(configs) == set([('/file3.conf',
ConfigFeature(name='file3.conf',
content='content',
path='/file3.conf'),
'config'),
('/etc/file1',
ConfigFeature(name='file1',
content='content',
path='/etc/file1'),
'config')])
@mock.patch(
("plugins.systems.config_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.config_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.config_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_container_crawler_avoidsetns(self, *args):
fc = ConfigContainerCrawler()
for (k, f, fname) in fc.crawl(known_config_files=['/etc/file1'],
discover_config_files=False,
avoid_setns=True):
assert fname == "config"
assert f == ConfigFeature(name='file1', content='content',
path='/etc/file1')
assert args[0].call_count == 1 # lstat
@mock.patch(
("plugins.systems.config_container_crawler."
"run_as_another_namespace"),
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.config_container_crawler."
"utils.dockerutils.exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.config_container_crawler.utils.dockerutils."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/1/2/3')
@mock.patch('utils.config_utils.os.path.isdir',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.walk',
side_effect=lambda p: [
('/', [], ['file1', 'file2', 'file3.conf'])])
@mock.patch('utils.config_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.isfile',
side_effect=lambda p: True)
@mock.patch('utils.config_utils.os.path.getsize',
side_effect=lambda p: 1000)
@mock.patch('utils.config_utils.os.lstat',
side_effect=mocked_os_lstat)
@mock.patch('utils.config_utils.codecs.open',
side_effect=mocked_codecs_open)
def test_config_container_crawler_avoidsetns_discover(self, *args):
fc = ConfigContainerCrawler()
configs = fc.crawl(known_config_files=['/etc/file1'],
avoid_setns=True,
discover_config_files=True)
assert set(configs) == set([('/file3.conf',
ConfigFeature(name='file3.conf',
content='content',
path='/file3.conf'),
'config'),
('/etc/file1',
ConfigFeature(name='file1',
content='content',
path='/etc/file1'),
'config')])
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'ubuntu',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.package_utils.get_dpkg_packages',
side_effect=lambda a, b, c: [('pkg1',
PackageFeature(None, 'pkg1',
123, 'v1',
'x86'))])
def test_package_host_crawler_dpkg(self, *args):
fc = PackageHostCrawler()
for (k, f, fname) in fc.crawl():
assert fname == "package"
assert f == PackageFeature(
installed=None,
pkgname='pkg1',
pkgsize=123,
pkgversion='v1',
pkgarchitecture='x86')
assert args[0].call_count == 1
args[0].assert_called_with('/', 'var/lib/dpkg', 0)
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'ubuntu',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.package_utils.get_dpkg_packages',
side_effect=throw_os_error)
def test_package_host_crawler_dpkg_failure(self, *args):
fc = PackageHostCrawler()
with self.assertRaises(CrawlError):
for (k, f, fname) in fc.crawl():
pass
assert args[0].call_count == 1
args[0].assert_called_with('/', 'var/lib/dpkg', 0)
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'redhat',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.package_utils.get_rpm_packages',
side_effect=lambda a, b, c, d: [('pkg1',
PackageFeature(None, 'pkg1',
123, 'v1',
'x86'))])
def test_package_host_crawler_rpm(self, *args):
fc = PackageHostCrawler()
for (k, f, fname) in fc.crawl():
assert fname == "package"
assert f == PackageFeature(
installed=None,
pkgname='pkg1',
pkgsize=123,
pkgversion='v1',
pkgarchitecture='x86')
assert args[0].call_count == 1
args[0].assert_called_with('/', 'var/lib/rpm', 0, False)
@mock.patch(
("plugins.systems.package_container_crawler."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'ubuntu',
'version': '123'})
@mock.patch(
'plugins.systems.package_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.package_utils.get_dpkg_packages',
side_effect=lambda a, b, c: [('pkg1',
PackageFeature(None, 'pkg1',
123, 'v1',
'x86'))])
def test_package_container_crawler_dpkg(self, *args):
fc = PackageContainerCrawler()
for (k, f, fname) in fc.crawl():
assert fname == "package"
assert f == PackageFeature(
installed=None,
pkgname='pkg1',
pkgsize=123,
pkgversion='v1',
pkgarchitecture='x86')
assert args[0].call_count == 1
args[0].assert_called_with('/', 'var/lib/dpkg', 0)
@mock.patch(
("plugins.systems.package_container_crawler."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.package_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.package_container_crawler."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/a/b/c')
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'ubuntu',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True if 'dpkg' in p else False)
@mock.patch('utils.package_utils.get_dpkg_packages',
side_effect=throw_os_error)
def test_package_container_crawler_dpkg_failure(self, *args):
fc = PackageContainerCrawler()
with self.assertRaises(CrawlError):
for (k, f, fname) in fc.crawl():
pass
# get_dpkg_packages is called a second time after the first failure.
# first time is OUTCONTAINER mode with setns
# second time is OUTCONTAINER mode with avoid_setns
assert args[0].call_count == 2
args[0].assert_called_with('/a/b/c', 'var/lib/dpkg', 0)
args[2].assert_called_with(mount_point='/a/b/c') # get_osinfo()
@mock.patch(
("plugins.systems.package_container_crawler."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.package_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.package_container_crawler."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/a/b/c')
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'redhat',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True if 'rpm' in p else False)
@mock.patch('utils.package_utils.get_rpm_packages',
side_effect=throw_os_error)
def test_package_container_crawler_rpm_failure(self, *args):
fc = PackageContainerCrawler()
with self.assertRaises(CrawlError):
for (k, f, fname) in fc.crawl():
pass
# get_rpm_packages is called a second time after the first failure.
# first time is OUTCONTAINER mode with setns
# second time is OUTCONTAINER mode with avoid_setns
assert args[0].call_count == 2
args[0].assert_called_with('/a/b/c', 'var/lib/rpm', 0, True)
args[2].assert_called_with(mount_point='/a/b/c') # get_osinfo()
@mock.patch(
("plugins.systems.package_container_crawler."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
("plugins.systems.package_container_crawler."
"get_docker_container_rootfs_path"),
side_effect=lambda long_id: '/a/b/c')
@mock.patch(
'utils.package_utils.osinfo.get_osinfo',
side_effect=lambda mount_point=None: {
'os': 'ubuntu',
'version': '123'})
@mock.patch('utils.package_utils.os.path.exists',
side_effect=lambda p: True)
@mock.patch('utils.package_utils.get_dpkg_packages',
side_effect=lambda a, b, c: [('pkg1',
PackageFeature(None, 'pkg1',
123, 'v1',
'x86'))])
def test_package_container_crawler_avoidsetns(self, *args):
fc = PackageContainerCrawler()
for (k, f, fname) in fc.crawl(avoid_setns=True):
assert fname == "package"
assert f == PackageFeature(
installed=None,
pkgname='pkg1',
pkgsize=123,
pkgversion='v1',
pkgarchitecture='x86')
assert args[0].call_count == 1
@mock.patch('plugins.systems.process_host_crawler.psutil.process_iter',
side_effect=lambda: [Process('init')])
def test_process_host_crawler(self, *args):
fc = ProcessHostCrawler()
for (k, f, fname) in fc.crawl():
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[0].call_count == 1
@mock.patch(
("plugins.systems.process_container_crawler.utils.dockerutils."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
@mock.patch(
'plugins.systems.process_container_crawler.psutil.process_iter',
side_effect=lambda: [Process('init')])
@mock.patch(
'plugins.systems.process_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
def test_process_container_crawler(self, *args):
fc = ProcessContainerCrawler()
for (k, f, fname) in fc.crawl('123'):
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[0].call_count == 1
@mock.patch('plugins.systems.process_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.process_vm_crawler.psvmi.process_iter',
side_effect=lambda vmc: [Process('init')])
@mock.patch('plugins.systems.process_vm_crawler.psvmi')
def test_process_vm_crawler(self, *args):
fc = process_vm_crawler()
for (k, f, fname) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
print f
assert fname == "process"
assert f.pname == 'init'
assert f.cmd == 'cmd'
assert f.pid == 123
assert args[1].call_count == 1 # process_iter
@mock.patch('utils.disk_utils.psutil.disk_partitions',
side_effect=mocked_disk_partitions)
@mock.patch('utils.disk_utils.psutil.disk_usage',
side_effect=lambda x: pdiskusage(10, 100))
def test_crawl_disk_partitions_invm_mode(self, *args):
fc = DiskHostCrawler()
disks = fc.crawl()
assert set(disks) == set([('/a',
DiskFeature(partitionname='/dev/a',
freepct=90.0,
fstype='type',
mountpt='/a',
mountopts='opts',
partitionsize=100),
'disk'),
('/b',
DiskFeature(partitionname='/dev/b',
freepct=90.0,
fstype='type',
mountpt='/b',
mountopts='opts',
partitionsize=100),
'disk')])
@mock.patch(
'plugins.systems.disk_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch('utils.disk_utils.psutil.disk_partitions',
side_effect=mocked_disk_partitions)
@mock.patch('utils.disk_utils.psutil.disk_usage',
side_effect=lambda x: pdiskusage(10, 100))
@mock.patch(
("plugins.systems.disk_container_crawler.utils.dockerutils."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
def test_crawl_disk_partitions_outcontainer_mode(self, *args):
fc = DiskContainerCrawler()
disks = fc.crawl('123')
assert set(disks) == set([('/a',
DiskFeature(partitionname='/dev/a',
freepct=90.0,
fstype='type',
mountpt='/a',
mountopts='opts',
partitionsize=100),
'disk'),
('/b',
DiskFeature(partitionname='/dev/b',
freepct=90.0,
fstype='type',
mountpt='/b',
mountopts='opts',
partitionsize=100),
'disk')])
@mock.patch('utils.metric_utils.psutil.process_iter',
side_effect=lambda: [Process('init')])
def test_crawl_metrics_invm_mode(self, *args):
fc = MetricHostCrawler()
for (k, f, t) in fc.crawl():
assert f.cpupct == 30.0
assert f.mempct == 30.0
assert f.pname == 'init'
assert f.pid == 123
assert f.rss == 10
assert f.status == 'Running'
assert f.vms == 20
assert f.read == 10
assert f.write == 20
assert args[0].call_count == 1
@mock.patch('utils.metric_utils.psutil.process_iter',
side_effect=lambda: [Process('init')])
@mock.patch('utils.metric_utils.round',
side_effect=throw_os_error)
def test_crawl_metrics_invm_mode_failure(self, *args):
with self.assertRaises(OSError):
fc = MetricHostCrawler()
for ff in fc.crawl():
pass
assert args[0].call_count == 1
@mock.patch('utils.metric_utils.psutil.process_iter',
side_effect=lambda: [Process('init')])
@mock.patch(
'plugins.systems.metric_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.disk_container_crawler.utils.dockerutils."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
def test_crawl_metrics_outcontainer_mode(self, *args):
fc = MetricContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f.cpupct == 30.0
assert f.mempct == 30.0
assert f.pname == 'init'
assert f.pid == 123
assert f.rss == 10
assert f.status == 'Running'
assert f.vms == 20
assert f.read == 10
assert f.write == 20
assert args[0].call_count == 1
@mock.patch('plugins.systems.metric_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.metric_vm_crawler.psvmi.process_iter',
side_effect=lambda vmc: [Process('init')])
@mock.patch(
("plugins.systems.metric_vm_crawler."
"MetricVmCrawler._crawl_metrics_cpu_percent"),
side_effect=lambda proc: 30.0)
@mock.patch('plugins.systems.metric_vm_crawler.psvmi')
def test_crawl_metrics_vm_mode(self, *args):
fc = MetricVmCrawler()
for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert f.cpupct == 30.0
assert f.mempct == 30.0
assert f.pname == 'init'
assert f.pid == 123
assert f.rss == 10
assert f.status == 'Running'
assert f.vms == 20
assert f.read == 10
assert f.write == 20
assert args[1].call_count == 1 # process_iter
@mock.patch('utils.connection_utils.psutil.process_iter',
side_effect=lambda: [Process('init')])
def test_crawl_connections_invm_mode(self, *args):
fc = ConnectionHostCrawler()
for (k, f, t) in fc.crawl():
assert f.localipaddr == '1.1.1.1'
assert f.remoteipaddr == '2.2.2.2'
assert f.localport == '22'
assert f.remoteport == '22'
assert args[0].call_count == 1
@mock.patch('utils.connection_utils.psutil.process_iter',
side_effect=lambda: [Process('init')])
@mock.patch(
'plugins.systems.connection_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch(
("plugins.systems.connection_container_crawler.utils.dockerutils."
"exec_dockerinspect"),
side_effect=lambda long_id: {'State': {'Pid': 123}})
def test_crawl_connections_outcontainer_mode(self, *args):
fc = ConnectionContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f.localipaddr == '1.1.1.1'
assert f.remoteipaddr == '2.2.2.2'
assert f.localport == '22'
assert f.remoteport == '22'
assert args[0].call_count == 1
@mock.patch('plugins.systems.connection_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.connection_vm_crawler.psvmi.process_iter',
side_effect=lambda vmc: [Process('init')])
@mock.patch('plugins.systems.connection_vm_crawler.psvmi')
def test_crawl_connections_outvm_mode(self, *args):
fc = ConnectionVmCrawler()
for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert f.localipaddr == '1.1.1.1'
assert f.remoteipaddr == '2.2.2.2'
assert f.localport == '22'
assert f.remoteport == '22'
assert args[1].call_count == 1
@mock.patch('plugins.systems.memory_host_crawler.psutil.virtual_memory',
side_effect=lambda: psutils_memory(2, 2, 3, 4))
def test_crawl_memory_invm_mode(self, *args):
fc = MemoryHostCrawler()
for (k, f, t) in fc.crawl():
assert f == MemoryFeature(
memory_used=2,
memory_buffered=3,
memory_cached=4,
memory_free=2,
memory_util_percentage=50)
assert args[0].call_count == 1
@mock.patch('plugins.systems.memory_host_crawler.psutil.virtual_memory',
side_effect=throw_os_error)
def test_crawl_memory_invm_mode_failure(self, *args):
fc = MemoryHostCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
assert args[0].call_count == 1
@mock.patch('plugins.systems.memory_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.memory_vm_crawler.psvmi.system_memory_info',
side_effect=lambda vmc: psvmi_memory(10, 20, 30, 40))
@mock.patch('plugins.systems.memory_vm_crawler.psvmi')
def test_crawl_memory_outvm_mode(self, *args):
fc = MemoryVmCrawler()
for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert f == MemoryFeature(
memory_used=10,
memory_buffered=20,
memory_cached=30,
memory_free=40,
memory_util_percentage=20)
assert args[1].call_count == 1
@mock.patch(
'plugins.systems.memory_container_crawler.psutil.virtual_memory',
side_effect=lambda: psutils_memory(
10,
10,
3,
10))
@mock.patch('plugins.systems.memory_container_crawler.open',
side_effect=mocked_memory_cgroup_open)
@mock.patch('plugins.systems.memory_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
def test_crawl_memory_outcontainer_mode(self, *args):
fc = MemoryContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == MemoryFeature(
memory_used=2,
memory_buffered=200,
memory_cached=100,
memory_free=0,
memory_util_percentage=100)
assert args[1].call_count == 3 # 3 cgroup files
@mock.patch(
'plugins.systems.memory_container_crawler.psutil.virtual_memory',
side_effect=lambda: psutils_memory(
10,
10,
3,
10))
@mock.patch('plugins.systems.memory_container_crawler.open',
side_effect=throw_os_error)
@mock.patch('plugins.systems.memory_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
def test_crawl_memory_outcontainer_mode_failure(self, *args):
fc = MemoryContainerCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl('123'):
pass
assert args[1].call_count == 1 # 1 cgroup file
@mock.patch(
'plugins.systems.cpu_host_crawler.psutil.cpu_times_percent',
side_effect=lambda percpu: [
psutils_cpu(
10,
20,
30,
40,
50,
60,
70)])
def test_crawl_cpu_invm_mode(self, *args):
fc = CpuHostCrawler()
for (k, f, t) in fc.crawl():
assert f == CpuFeature(
cpu_idle=10,
cpu_nice=20,
cpu_user=30,
cpu_wait=40,
cpu_system=50,
cpu_interrupt=60,
cpu_steal=70,
cpu_util=90)
assert args[0].call_count == 1
@mock.patch('plugins.systems.cpu_host_crawler.psutil.cpu_times_percent',
side_effect=throw_os_error)
def test_crawl_cpu_invm_mode_failure(self, *args):
fc = CpuHostCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.cpu_container_crawler.psutil.cpu_times_percent',
side_effect=lambda percpu: [
psutils_cpu(
10,
20,
30,
40,
50,
60,
70)])
@mock.patch('plugins.systems.cpu_container_crawler.CpuContainerCrawler._get_scaling_factor',
side_effect=lambda a,b: 1.0)
@mock.patch('plugins.systems.cpu_container_crawler.time.sleep')
@mock.patch('plugins.systems.cpu_container_crawler.open',
side_effect=mocked_cpu_cgroup_open)
@mock.patch('plugins.systems.cpu_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
def test_crawl_cpu_outcontainer_mode(self, *args):
fc = CpuContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == CpuFeature(
cpu_idle=90.0,
cpu_nice=20,
cpu_user=5.0,
cpu_wait=40,
cpu_system=5.0,
cpu_interrupt=60,
cpu_steal=70,
cpu_util=10.0)
assert args[1].call_count == 3 # open for 3 cgroup files
@mock.patch(
'plugins.systems.cpu_container_crawler.psutil.cpu_times_percent',
side_effect=lambda percpu: [
psutils_cpu(
10,
20,
30,
40,
50,
60,
70)])
@mock.patch('plugins.systems.cpu_container_crawler.time.sleep')
@mock.patch('plugins.systems.cpu_container_crawler.open',
side_effect=throw_os_error)
@mock.patch('plugins.systems.cpu_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
def test_crawl_cpu_outcontainer_mode_failure(self, *args):
fc = CpuContainerCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl('123'):
pass
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.interface_host_crawler.psutil.net_io_counters',
side_effect=lambda pernic: {'interface1-unit-tests':
psutils_net(
10,
20,
30,
40,
50,
60)})
def test_crawl_interface_invm_mode(self, *args):
fc = InterfaceHostCrawler()
for (k, f, t) in fc.crawl():
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
for (k, f, t) in fc.crawl():
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
assert args[0].call_count == 2
@mock.patch(
'plugins.systems.interface_host_crawler.psutil.net_io_counters',
side_effect=throw_os_error)
def test_crawl_interface_invm_mode_failure(self, *args):
fc = InterfaceHostCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
# Each crawl in crawlutils.py instantiates a FeaturesCrawler object
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
assert args[0].call_count == 2
@mock.patch('plugins.systems.interface_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
@mock.patch(
'plugins.systems.interface_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch(
'plugins.systems.interface_container_crawler.psutil.net_io_counters',
side_effect=lambda pernic: {'eth0':
psutils_net(
10,
20,
30,
40,
50,
60)})
def test_crawl_interface_outcontainer_mode(self, *args):
fc = InterfaceContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
for (k, f, t) in fc.crawl('123'):
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
assert args[0].call_count == 2
assert args[1].call_count == 2
@mock.patch('plugins.systems.interface_vm_crawler.psvmi.context_init',
side_effect=lambda dn1, dn2, kv, d, a: 1000)
@mock.patch('plugins.systems.interface_vm_crawler.psvmi.interface_iter',
side_effect=lambda vmc: [psvmi_interface(
'eth1', 10, 20, 30, 40, 50, 60)])
@mock.patch('plugins.systems.interface_vm_crawler.psvmi')
def test_crawl_interface_outvm_mode(self, *args):
fc = InterfaceVmCrawler()
for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
for (k, f, t) in fc.crawl(vm_desc=('dn', '2.6', 'ubuntu', 'x86')):
assert f == InterfaceFeature(
if_octets_tx=0,
if_octets_rx=0,
if_packets_tx=0,
if_packets_rx=0,
if_errors_tx=0,
if_errors_rx=0)
assert args[1].call_count == 2
assert args[2].call_count == 2
@mock.patch('plugins.systems.load_host_crawler.os.getloadavg',
side_effect=lambda: [1, 2, 3])
def test_crawl_load_invm_mode(self, *args):
fc = LoadHostCrawler()
for (k, f, t) in fc.crawl():
assert f == LoadFeature(shortterm=1, midterm=2, longterm=2)
assert args[0].call_count == 1
@mock.patch('plugins.systems.load_host_crawler.os.getloadavg',
side_effect=throw_os_error)
def test_crawl_load_invm_mode_failure(self, *args):
fc = LoadHostCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.load_container_crawler.run_as_another_namespace',
side_effect=mocked_run_as_another_namespace)
@mock.patch('plugins.systems.load_container_crawler.os.getloadavg',
side_effect=lambda: [1, 2, 3])
@mock.patch('plugins.systems.load_container_crawler.DockerContainer',
side_effect=lambda container_id: DummyContainer(container_id))
def test_crawl_load_outcontainer_mode(self, *args):
fc = LoadContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == LoadFeature(shortterm=1, midterm=2, longterm=2)
assert args[1].call_count == 1
assert args[2].call_count == 1
@mock.patch('plugins.systems.dockerps_host_crawler.exec_dockerps',
side_effect=lambda: [{'State': {'Running': True},
'Image': 'reg/image:latest',
'Config': {'Cmd': 'command'},
'Name': 'name',
'Id': 'id'}])
def test_crawl_dockerps_invm_mode(self, *args):
fc = DockerpsHostCrawler()
for (k, f, t) in fc.crawl():
assert f == DockerPSFeature(
Status=True,
Created=0,
Image='reg/image:latest',
Ports=[],
Command='command',
Names='name',
Id='id')
assert args[0].call_count == 1
@mock.patch('plugins.systems.dockerps_host_crawler.exec_dockerps',
side_effect=throw_os_error)
def test_crawl_dockerps_invm_mode_failure(self, *args):
fc = DockerpsHostCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl():
pass
assert args[0].call_count == 1
@mock.patch('plugins.systems.dockerhistory_container_crawler.exec_docker_history',
side_effect=lambda long_id: [
{'Id': 'image1', 'random': 'abc'},
{'Id': 'image2', 'random': 'abc'}])
def test_crawl_dockerhistory_outcontainer_mode(self, *args):
fc = DockerhistoryContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == {'history': [{'Id': 'image1', 'random': 'abc'},
{'Id': 'image2', 'random': 'abc'}]}
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.dockerhistory_container_crawler.exec_docker_history',
side_effect=throw_os_error)
def test_crawl_dockerhistory_outcontainer_mode_failure(self, *args):
fc = DockerhistoryContainerCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl('123'):
pass
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.dockerinspect_container_crawler.exec_dockerinspect',
side_effect=lambda long_id: {
'Id': 'image1',
'random': 'abc'})
def test_crawl_dockerinspect_outcontainer_mode(self, *args):
fc = DockerinspectContainerCrawler()
for (k, f, t) in fc.crawl('123'):
assert f == {'Id': 'image1', 'random': 'abc'}
assert args[0].call_count == 1
@mock.patch(
'plugins.systems.dockerinspect_container_crawler.exec_dockerinspect',
side_effect=throw_os_error)
def test_crawl_dockerinspect_outcontainer_mode_failure(self, *args):
fc = DockerinspectContainerCrawler()
with self.assertRaises(OSError):
for (k, f, t) in fc.crawl('123'):
pass
assert args[0].call_count == 1<|fim▁end|>
|
@mock.patch('utils.file_utils.os.walk',
side_effect=mocked_os_walk)
@mock.patch('utils.file_utils.os.lstat',
|
<|file_name|>PartListSection.tsx<|end_file_name|><|fim▁begin|>import React, { useMemo } from 'react';
import { cx, css } from '@emotion/css';
import { MenuItem, WithContextMenu, MenuGroup, useTheme2 } from '@grafana/ui';
import { SelectableValue, GrafanaTheme2 } from '@grafana/data';
import { Seg } from './Seg';
import { unwrap } from './unwrap';
import { toSelectableValue } from './toSelectableValue';
import { AddButton } from './AddButton';
export type PartParams = Array<{
value: string;
options: (() => Promise<string[]>) | null;
}>;
type Props = {
parts: Array<{
name: string;
params: PartParams;
}>;
getNewPartOptions: () => Promise<SelectableValue[]>;
onChange: (partIndex: number, paramValues: string[]) => void;
onRemovePart: (index: number) => void;
onAddNewPart: (type: string) => void;
};
const renderRemovableNameMenuItems = (onClick: () => void) => {
return (
<MenuGroup label="">
<MenuItem label="remove" onClick={onClick} />
</MenuGroup>
);
};
const noRightMarginPaddingClass = css({
paddingRight: '0',
marginRight: '0',
});
const RemovableName = ({ name, onRemove }: { name: string; onRemove: () => void }) => {
return (
<WithContextMenu renderMenuItems={() => renderRemovableNameMenuItems(onRemove)}>
{({ openMenu }) => (<|fim▁hole|> )}
</WithContextMenu>
);
};
type PartProps = {
name: string;
params: PartParams;
onRemove: () => void;
onChange: (paramValues: string[]) => void;
};
const noHorizMarginPaddingClass = css({
paddingLeft: '0',
paddingRight: '0',
marginLeft: '0',
marginRight: '0',
});
const getPartClass = (theme: GrafanaTheme2) => {
return cx(
'gf-form-label',
css({
paddingLeft: '0',
// gf-form-label class makes certain css attributes incorrect
// for the selectbox-dropdown, so we have to "reset" them back
lineHeight: theme.typography.body.lineHeight,
fontSize: theme.typography.body.fontSize,
})
);
};
const Part = ({ name, params, onChange, onRemove }: PartProps): JSX.Element => {
const theme = useTheme2();
const partClass = useMemo(() => getPartClass(theme), [theme]);
const onParamChange = (par: string, i: number) => {
const newParams = params.map((p) => p.value);
newParams[i] = par;
onChange(newParams);
};
return (
<div className={partClass}>
<RemovableName name={name} onRemove={onRemove} />(
{params.map((p, i) => {
const { value, options } = p;
const isLast = i === params.length - 1;
const loadOptions =
options !== null ? () => options().then((items) => items.map(toSelectableValue)) : undefined;
return (
<React.Fragment key={i}>
<Seg
allowCustomValue
value={value}
buttonClassName={noHorizMarginPaddingClass}
loadOptions={loadOptions}
onChange={(v) => {
onParamChange(unwrap(v.value), i);
}}
/>
{!isLast && ','}
</React.Fragment>
);
})}
)
</div>
);
};
export const PartListSection = ({
parts,
getNewPartOptions,
onAddNewPart,
onRemovePart,
onChange,
}: Props): JSX.Element => {
return (
<>
{parts.map((part, index) => (
<Part
key={index}
name={part.name}
params={part.params}
onRemove={() => {
onRemovePart(index);
}}
onChange={(pars) => {
onChange(index, pars);
}}
/>
))}
<AddButton loadOptions={getNewPartOptions} onAdd={onAddNewPart} />
</>
);
};<|fim▁end|>
|
<button className={cx('gf-form-label', noRightMarginPaddingClass)} onClick={openMenu}>
{name}
</button>
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Unix SMB/CIFS implementation.
# Copyright (C) Jelmer Vernooij <[email protected]> 2007-2008
#
# Based on the original in EJS:
# Copyright (C) Andrew Tridgell <[email protected]> 2005
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""Samba 4."""
__docformat__ = "restructuredText"
import os
import sys
import samba.param
def source_tree_topdir():
'''return the top level directory (the one containing the source4 directory)'''
paths = [ "../../..", "../../../.." ]
for p in paths:
topdir = os.path.normpath(os.path.join(os.path.dirname(__file__), p))
if os.path.exists(os.path.join(topdir, 'source4')):
return topdir
raise RuntimeError("unable to find top level source directory")
def in_source_tree():
'''return True if we are running from within the samba source tree'''
try:
topdir = source_tree_topdir()
except RuntimeError:
return False
return True
import ldb
from samba._ldb import Ldb as _Ldb
class Ldb(_Ldb):
"""Simple Samba-specific LDB subclass that takes care
of setting up the modules dir, credentials pointers, etc.
Please note that this is intended to be for all Samba LDB files,
not necessarily the Sam database. For Sam-specific helper
functions see samdb.py.
"""
def __init__(self, url=None, lp=None, modules_dir=None, session_info=None,
credentials=None, flags=0, options=None):
"""Opens a Samba Ldb file.
:param url: Optional LDB URL to open
:param lp: Optional loadparm object
:param modules_dir: Optional modules directory
:param session_info: Optional session information
:param credentials: Optional credentials, defaults to anonymous.
:param flags: Optional LDB flags
:param options: Additional options (optional)
This is different from a regular Ldb file in that the Samba-specific
modules-dir is used by default and that credentials and session_info
can be passed through (required by some modules).
"""
if modules_dir is not None:
self.set_modules_dir(modules_dir)
else:
self.set_modules_dir(os.path.join(samba.param.modules_dir(), "ldb"))
if session_info is not None:
self.set_session_info(session_info)
if credentials is not None:
self.set_credentials(credentials)
if lp is not None:
self.set_loadparm(lp)
# This must be done before we load the schema, as these handlers for
# objectSid and objectGUID etc must take precedence over the 'binary
# attribute' declaration in the schema
self.register_samba_handlers()
# TODO set debug
def msg(l, text):
print text
#self.set_debug(msg)
self.set_utf8_casefold()
# Allow admins to force non-sync ldb for all databases
if lp is not None:
nosync_p = lp.get("nosync", "ldb")
if nosync_p is not None and nosync_p == True:
flags |= ldb.FLG_NOSYNC
self.set_create_perms(0600)
if url is not None:
self.connect(url, flags, options)
def searchone(self, attribute, basedn=None, expression=None,
scope=ldb.SCOPE_BASE):
"""Search for one attribute as a string.
:param basedn: BaseDN for the search.
:param attribute: Name of the attribute
:param expression: Optional search expression.
:param scope: Search scope (defaults to base).
:return: Value of attribute as a string or None if it wasn't found.
"""
res = self.search(basedn, scope, expression, [attribute])
if len(res) != 1 or res[0][attribute] is None:
return None
values = set(res[0][attribute])
assert len(values) == 1
return self.schema_format_value(attribute, values.pop())
def erase_users_computers(self, dn):
"""Erases user and computer objects from our AD.
This is needed since the 'samldb' module denies the deletion of primary
groups. Deleting the user and computer accounts first ensures that no group is still anyone's primary group.
"""
try:
res = self.search(base=dn, scope=ldb.SCOPE_SUBTREE, attrs=[],
expression="(|(objectclass=user)(objectclass=computer))")
except ldb.LdbError, (errno, _):
if errno == ldb.ERR_NO_SUCH_OBJECT:
# Ignore no such object errors
return
else:
raise
try:
for msg in res:
self.delete(msg.dn, ["relax:0"])
except ldb.LdbError, (errno, _):
if errno != ldb.ERR_NO_SUCH_OBJECT:
# Ignore no such object errors
raise
def erase_except_schema_controlled(self):
"""Erase this ldb.
:note: Removes all records, except those that are controlled by
Samba4's schema.
"""
basedn = ""
# Try to delete user/computer accounts to allow deletion of groups
self.erase_users_computers(basedn)
# Delete the 'visible' records, and the invisible 'deleted' records (if this DB supports it)
for msg in self.search(basedn, ldb.SCOPE_SUBTREE,
"(&(|(objectclass=*)(distinguishedName=*))(!(distinguishedName=@BASEINFO)))",
[], controls=["show_deleted:0", "show_recycled:0"]):
try:
self.delete(msg.dn, ["relax:0"])
except ldb.LdbError, (errno, _):
if errno != ldb.ERR_NO_SUCH_OBJECT:
# Ignore no such object errors
raise
res = self.search(basedn, ldb.SCOPE_SUBTREE,
"(&(|(objectclass=*)(distinguishedName=*))(!(distinguishedName=@BASEINFO)))", [], controls=["show_deleted:0", "show_recycled:0"])
assert len(res) == 0
# delete the specials
for attr in ["@SUBCLASSES", "@MODULES",
"@OPTIONS", "@PARTITION", "@KLUDGEACL"]:
try:
self.delete(attr, ["relax:0"])
except ldb.LdbError, (errno, _):
if errno != ldb.ERR_NO_SUCH_OBJECT:
# Ignore missing dn errors
raise
def erase(self):
"""Erase this ldb, removing all records."""
self.erase_except_schema_controlled()
# delete the specials
for attr in ["@INDEXLIST", "@ATTRIBUTES"]:
try:
self.delete(attr, ["relax:0"])
except ldb.LdbError, (errno, _):
if errno != ldb.ERR_NO_SUCH_OBJECT:
# Ignore missing dn errors
raise
def load_ldif_file_add(self, ldif_path):
"""Load a LDIF file.
:param ldif_path: Path to LDIF file.
"""
self.add_ldif(open(ldif_path, 'r').read())
def add_ldif(self, ldif, controls=None):
"""Add data based on a LDIF string.
:param ldif: LDIF text.
"""
for changetype, msg in self.parse_ldif(ldif):
assert changetype == ldb.CHANGETYPE_NONE
self.add(msg, controls)
def modify_ldif(self, ldif, controls=None):
"""Modify database based on a LDIF string.
:param ldif: LDIF text.
"""
for changetype, msg in self.parse_ldif(ldif):
if changetype == ldb.CHANGETYPE_ADD:
self.add(msg, controls)
else:
self.modify(msg, controls)
def substitute_var(text, values):
"""Substitute strings of the form ${NAME} in str, replacing
with substitutions from values.
:param text: Text in which to subsitute.
:param values: Dictionary with keys and values.
"""
for (name, value) in values.items():
assert isinstance(name, str), "%r is not a string" % name
assert isinstance(value, str), "Value %r for %s is not a string" % (value, name)
text = text.replace("${%s}" % name, value)
return text
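# Illustrative usage only (values are hypothetical, not part of the original module):
# substitute_var("prefix=${PREFIX}", {"PREFIX": "/usr/local"}) returns
# "prefix=/usr/local"; any "${...}" left unreplaced is caught later by
# check_all_substituted(), which raises an Exception naming the variable.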
def check_all_substituted(text):
"""Check that all substitution variables in a string have been replaced.
If not, raise an exception.
:param text: The text to search for substitution variables
"""
if not "${" in text:
return
var_start = text.find("${")
var_end = text.find("}", var_start)
raise Exception("Not all variables substituted: %s" %
text[var_start:var_end+1])
def read_and_sub_file(file_name, subst_vars):
"""Read a file and sub in variables found in it
:param file_name: File to be read (typically from setup directory)
param subst_vars: Optional variables to subsitute in the file.
"""
data = open(file_name, 'r').read()
if subst_vars is not None:
data = substitute_var(data, subst_vars)
check_all_substituted(data)
return data
def setup_file(template, fname, subst_vars=None):
"""Setup a file in the private dir.
:param template: Path of the template file.
:param fname: Path of the file to create.
:param subst_vars: Substitution variables.
"""
if os.path.exists(fname):
os.unlink(fname)
data = read_and_sub_file(template, subst_vars)
f = open(fname, 'w')
try:
f.write(data)
finally:
f.close()
def valid_netbios_name(name):
"""Check whether a name is valid as a NetBIOS name. """
# See crh's book (1.4.1.1)
if len(name) > 15:
return False
for x in name:
if not x.isalnum() and not x in " !#$%&'()-.@^_{}~":
return False
return True
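# Illustrative checks only (names are hypothetical): valid_netbios_name("FILESERVER01")
# is True, while a name longer than 15 characters or one containing a character
# outside the allowed set (e.g. "BAD*NAME") is rejected.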
def import_bundled_package(modulename, location):
"""Import the bundled version of a package.
:note: This should only be called if the system version of the package
is not adequate.
:param modulename: Module name to import
:param location: Location to add to sys.path (can be relative to
${srcdir}/lib)
"""
if in_source_tree():
sys.path.insert(0, os.path.join(source_tree_topdir(), "lib", location))
sys.modules[modulename] = __import__(modulename)
else:
sys.modules[modulename] = __import__(
"samba.external.%s" % modulename, fromlist=["samba.external"])
def ensure_external_module(modulename, location):
"""Add a location to sys.path if an external dependency can't be found.
:param modulename: Module name to import
:param location: Location to add to sys.path (can be relative to
${srcdir}/lib)
"""<|fim▁hole|>
from samba import _glue
version = _glue.version
interface_ips = _glue.interface_ips
set_debug_level = _glue.set_debug_level
get_debug_level = _glue.get_debug_level
unix2nttime = _glue.unix2nttime
nttime2string = _glue.nttime2string
nttime2unix = _glue.nttime2unix
unix2nttime = _glue.unix2nttime
generate_random_password = _glue.generate_random_password
strcasecmp_m = _glue.strcasecmp_m
strstr_m = _glue.strstr_m<|fim▁end|>
|
try:
__import__(modulename)
except ImportError:
import_bundled_package(modulename, location)
|
<|file_name|>test_base_document.py<|end_file_name|><|fim▁begin|>import unittest
<|fim▁hole|>class TestBaseDocument(unittest.TestCase):
def test_docstatus(self):
doc = BaseDocument({"docstatus": 0})
self.assertTrue(doc.docstatus.is_draft())
self.assertEquals(doc.docstatus, 0)
doc.docstatus = 1
self.assertTrue(doc.docstatus.is_submitted())
self.assertEquals(doc.docstatus, 1)
doc.docstatus = 2
self.assertTrue(doc.docstatus.is_cancelled())
self.assertEquals(doc.docstatus, 2)<|fim▁end|>
|
from frappe.model.base_document import BaseDocument
|
<|file_name|>webpack.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// tslint:disable
// TODO: cleanup this file, it's copied as is from Angular CLI.<|fim▁hole|>export { BundleBudgetPlugin, BundleBudgetPluginOptions } from './bundle-budget';
export { ScriptsWebpackPlugin, ScriptsWebpackPluginOptions } from './scripts-webpack-plugin';
export { SuppressExtractedTextChunksWebpackPlugin } from './suppress-entry-chunks-webpack-plugin';
export {
default as PostcssCliResources,
PostcssCliResourcesOptions,
} from './postcss-cli-resources';
import { join } from 'path';
export const RawCssLoader = require.resolve(join(__dirname, 'raw-css-loader'));<|fim▁end|>
|
// Exports the webpack plugins we use internally.
export { BaseHrefWebpackPlugin } from '../lib/base-href-webpack/base-href-webpack-plugin';
export { CleanCssWebpackPlugin, CleanCssWebpackPluginOptions } from './cleancss-webpack-plugin';
|
<|file_name|>base_transform.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
API Issues to work out:
- MatrixTransform and STTransform both have 'scale' and 'translate'
attributes, but they are used in very different ways. It would be nice
to keep this consistent, but how?
- Need a transform.map_rect function that returns the bounding rectangle of
a rect after transformation. Non-linear transforms might need to work
harder at this, but we can provide a default implementation that
works by mapping a selection of points across a grid within the original
rect.
"""
from __future__ import division
from ..shaders import Function
from ...util.event import EventEmitter
class BaseTransform(object):
"""
BaseTransform is a base class that defines a pair of complementary
coordinate mapping functions in both python and GLSL.
All BaseTransform subclasses define map() and imap() methods that map
an object through the forward or inverse transformation, respectively.
The two class variables glsl_map and glsl_imap are instances of
shaders.Function that define the forward- and inverse-mapping GLSL<|fim▁hole|> inverse mapping.
Note that although all classes should define both map() and imap(), it
is not necessarily the case that imap(map(x)) == x; there may be instances
where the inverse mapping is ambiguous or otherwise meaningless.
"""
glsl_map = None # Must be GLSL code
glsl_imap = None
# Flags used to describe the transformation. Subclasses should define each
# as True or False.
# (usually used for making optimization decisions)
# If True, then for any 3 colinear points, the
# transformed points will also be colinear.
Linear = None
# The transformation's effect on one axis is independent
# of the input position along any other axis.
Orthogonal = None
# If True, then the distance between two points is the
# same as the distance between the transformed points.
NonScaling = None
# Scale factors are applied equally to all axes.
Isometric = None
def __init__(self):
self._inverse = None
self._dynamic = False
self.changed = EventEmitter(source=self, type='transform_changed')
if self.glsl_map is not None:
self._shader_map = Function(self.glsl_map)
if self.glsl_imap is not None:
self._shader_imap = Function(self.glsl_imap)
def map(self, obj):
"""
Return *obj* mapped through the forward transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
def imap(self, obj):
"""
Return *obj* mapped through the inverse transformation.
Parameters
----------
obj : tuple (x,y) or (x,y,z)
array with shape (..., 2) or (..., 3)
"""
raise NotImplementedError()
@property
def inverse(self):
""" The inverse of this transform.
"""
if self._inverse is None:
self._inverse = InverseTransform(self)
return self._inverse
@property
def dynamic(self):
"""Boolean flag that indicates whether this transform is expected to
change frequently.
Transforms that are flagged as dynamic will not be collapsed in
``ChainTransform.simplified``. This allows changes to the transform
to propagate through the chain without requiring the chain to be
re-simplified.
"""
return self._dynamic
@dynamic.setter
def dynamic(self, d):
self._dynamic = d
def shader_map(self):
"""
Return a shader Function that accepts only a single vec4 argument
and defines new attributes / uniforms supplying the Function with
any static input.
"""
return self._shader_map
def shader_imap(self):
"""
see shader_map.
"""
return self._shader_imap
def _shader_object(self):
""" This method allows transforms to be assigned directly to shader
template variables.
Example::
code = 'void main() { gl_Position = $transform($position); }'
func = shaders.Function(code)
tr = STTransform()
func['transform'] = tr # use tr's forward mapping for $function
"""
return self.shader_map()
def update(self, *args):
"""
Called to inform any listeners that this transform has changed.
"""
self.changed(*args)
def __mul__(self, tr):
"""
Transform multiplication returns a new transform that is equivalent to
the two operands performed in series.
By default, multiplying two Transforms `A * B` will return
ChainTransform([A, B]). Subclasses may redefine this operation to
return more optimized results.
To ensure that both operands have a chance to simplify the operation,
all subclasses should follow the same procedure. For `A * B`:
1. A.__mul__(B) attempts to generate an optimized transform product.
2. If that fails, it must:
* return super(A).__mul__(B) OR
* return NotImplemented if the superclass would return an
invalid result.
3. When BaseTransform.__mul__(A, B) is called, it returns
NotImplemented, which causes B.__rmul__(A) to be invoked.
4. B.__rmul__(A) attempts to generate an optimized transform product.
5. If that fails, it must:
* return super(B).__rmul__(A) OR
* return ChainTransform([B, A]) if the superclass would return
an invalid result.
6. When BaseTransform.__rmul__(B, A) is called, ChainTransform([A, B])
is returned.
"""
# switch to __rmul__ attempts.
# Don't use the "return NotImplemented" trick, because that won't work if
# self and tr are of the same type.
return tr.__rmul__(self)
def __rmul__(self, tr):
return ChainTransform([tr, self])
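# Illustrative flow only (A and B stand for arbitrary BaseTransform subclasses):
# for A * B, A.__mul__(B) may build an optimized product; the generic __mul__
# above otherwise defers to B.__rmul__(A), and if B cannot optimize either, this
# fallback returns ChainTransform([A, B]), i.e. the two operands applied in series.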
def __repr__(self):
return "<%s at 0x%x>" % (self.__class__.__name__, id(self))
def __del__(self):
# we can remove ourselves from *all* events in this situation.
self.changed.disconnect()
class InverseTransform(BaseTransform):
def __init__(self, transform):
BaseTransform.__init__(self)
self._inverse = transform
self.map = transform.imap
self.imap = transform.map
@property
def Linear(self):
return self._inverse.Linear
@property
def Orthogonal(self):
return self._inverse.Orthogonal
@property
def NonScaling(self):
return self._inverse.NonScaling
@property
def Isometric(self):
return self._inverse.Isometric
@property
def shader_map(self):
return self._inverse.shader_imap
@property
def shader_imap(self):
return self._inverse.shader_map
def __repr__(self):
return ("<Inverse of %r>" % repr(self._inverse))
# import here to avoid import cycle; needed for BaseTransform.__mul__.
from .chain import ChainTransform # noqa<|fim▁end|>
|
function code.
Optionally, an inverse() method returns a new transform performing the
|
<|file_name|>test_task.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
HDL Testing Platform
REST API for HDL TP # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.task import Task # noqa: E501
from swagger_client.rest import ApiException<|fim▁hole|>
class TestTask(unittest.TestCase):
"""Task unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testTask(self):
"""Test Task"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.task.Task() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|>
| |
<|file_name|>AuditLogModel.java<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2017-2019 Arrow Electronics, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License 2.0
* which accompanies this distribution, and is available at
* http://apache.org/licenses/LICENSE-2.0
*
* Contributors:
* Arrow Electronics, Inc.
* Konexios, Inc.
*/
package com.konexios.api.models;
import android.os.Parcel;
import android.os.Parcelable;
import androidx.annotation.NonNull;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.Objects;
public class AuditLogModel implements Parcelable {
@SuppressWarnings("unused")
public static final Parcelable.Creator<AuditLogModel> CREATOR = new Parcelable.Creator<AuditLogModel>() {
@NonNull
@Override
public AuditLogModel createFromParcel(@NonNull Parcel in) {
return new AuditLogModel(in);
}
@NonNull
@Override
public AuditLogModel[] newArray(int size) {
return new AuditLogModel[size];
}
};
@SerializedName("createdBy")
@Expose
private String createdBy;
@SerializedName("createdString")
@Expose
private String createdString;
@SerializedName("objectHid")
@Expose
private String objectHid;
@SerializedName("parameters")
@Expose
private JsonElement parameters;
@SerializedName("productName")
@Expose
private String productName;
@SerializedName("type")
@Expose
private String type;
public AuditLogModel() {
}
protected AuditLogModel(@NonNull Parcel in) {
createdBy = in.readString();
createdString = (String) in.readValue(String.class.getClassLoader());
objectHid = in.readString();
JsonParser parser = new JsonParser();
parameters = parser.parse(in.readString()).getAsJsonObject();
productName = in.readString();
type = in.readString();
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
AuditLogModel that = (AuditLogModel) o;
return Objects.equals(createdBy, that.createdBy) &&
Objects.equals(createdString, that.createdString) &&
Objects.equals(objectHid, that.objectHid) &&
Objects.equals(parameters, that.parameters) &&
Objects.equals(productName, that.productName) &&
Objects.equals(type, that.type);
}
@Override
public int hashCode() {
return Objects.hash(createdBy, createdString, objectHid, parameters, productName, type);
}
/**
* @return The createdBy
*/
public String getCreatedBy() {
return createdBy;
}
/**
* @param createdBy The createdBy
*/
public void setCreatedBy(String createdBy) {
this.createdBy = createdBy;
}
/**
* @return The createdString
*/
public String getCreatedString() {
return createdString;
}
/**
* @param String The String
*/
public void setCreatedString(String String) {
this.createdString = String;
}
/**
* @return The objectHid
*/
public String getObjectHid() {
return objectHid;
}
/**<|fim▁hole|> }
/**
* @return The parameters
*/
public JsonElement getParameters() {
if (parameters == null) {
parameters = new JsonObject();
}
return parameters;
}
/**
* @param parameters The parameters
*/
public void setParameters(JsonElement parameters) {
this.parameters = parameters;
}
/**
* @return The productName
*/
public String getProductName() {
return productName;
}
/**
* @param productName The productName
*/
public void setProductName(String productName) {
this.productName = productName;
}
/**
* @return The type
*/
public String getType() {
return type;
}
/**
* @param type The type
*/
public void setType(String type) {
this.type = type;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(@NonNull Parcel dest, int flags) {
dest.writeString(createdBy);
dest.writeValue(createdString);
dest.writeString(objectHid);
String str = new Gson().toJson(getParameters());
dest.writeString(str);
dest.writeString(productName);
dest.writeString(type);
}
}<|fim▁end|>
|
* @param objectHid The objectHid
*/
public void setObjectHid(String objectHid) {
this.objectHid = objectHid;
|
<|file_name|>parser.hpp<|end_file_name|><|fim▁begin|>#ifndef _PARSER_HPP
#define _PARSER_HPP
#include <cassert>
#include <iostream>
#include <vector>
#include <string>
#include <cstdlib>
#include "toyobj.hpp"
#include "lexer.hpp"
#include "toy.hpp"
#include "ast.hpp"
class ParserContext {
public:
explicit ParserContext(LexerContext &lexer)
: lexer_(lexer) { lexer_.fetchtok(); }
AST *parse_ast(bool);
private:
Statement *parse_statement();
Statement *parse_while();
Statement *parse_if();
Statement *parse_return();
Statement *parse_def();
Expression *parse_expression();
Expression *parse_primary();
Expression *parse_binary_op_expression(Expression*, int);<|fim▁hole|> Expression *parse_paren_expression();
Expression *parse_number();
Expression *parse_string();
Expression *parse_word_expression();
AST *parse_block();
int get_prec(TokenType) const;
inline const Token *curtok() { return lexer_.curtok(); }
inline void eat_token(TokenType type) {
if (type != curtok()->type()) {
std::cout << "I was expecting " << Token::token_type_name(type) << " but got " << curtok()->name() << std::endl;
exit(1);
}
lexer_.fetchtok();
}
LexerContext &lexer_;
DISALLOW_COPY_AND_ASSIGN(ParserContext);
};
#endif<|fim▁end|>
| |
<|file_name|>border-base.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, '__esModule', {
value: true
});
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { 'default': obj }; }
var _postcss = require('postcss');
var _clone = require('../clone');
var _clone2 = _interopRequireDefault(_clone);
var _hasAllProps = require('../hasAllProps');
var _hasAllProps2 = _interopRequireDefault(_hasAllProps);
var _getLastNode = require('../getLastNode');
var _getLastNode2 = _interopRequireDefault(_getLastNode);
var _canMerge = require('../canMerge');
var _canMerge2 = _interopRequireDefault(_canMerge);
exports['default'] = function (direction) {
var wsc = ['width', 'style', 'color'].map(function (d) {
return 'border-' + direction + '-' + d;
});
var defaults = ['medium', 'none', 'currentColor'];
var declaration = 'border-' + direction;
var processor = {
explode: function explode(rule) {
rule.walkDecls(declaration, function (decl) {
var values = _postcss.list.space(decl.value);
wsc.forEach(function (prop, index) {
var node = (0, _clone2['default'])(decl);
node.prop = prop;
node.value = values[index];
if (node.value === undefined) {
node.value = defaults[index];
}
rule.insertAfter(decl, node);
});
decl.remove();
});
},
merge: function merge(rule) {
var decls = rule.nodes.filter(function (node) {
return node.prop && ~wsc.indexOf(node.prop);
});
var _loop = function () {
var lastNode = decls[decls.length - 1];
var props = decls.filter(function (node) {
return node.important === lastNode.important;
});
if (_hasAllProps2['default'].apply(undefined, [props].concat(wsc)) && _canMerge2['default'].apply(undefined, props)) {
var values = wsc.map(function (prop) {
return (0, _getLastNode2['default'])(props, prop).value;
});
var value = values.concat(['']).reduceRight(function (prev, cur, i) {
if (prev === '' && cur === defaults[i]) {
return prev;
}<|fim▁hole|> }
var shorthand = (0, _clone2['default'])(lastNode);
shorthand.prop = declaration;
shorthand.value = value;
rule.insertAfter(lastNode, shorthand);
props.forEach(function (prop) {
return prop.remove();
});
}
decls = decls.filter(function (node) {
return ! ~props.indexOf(node);
});
};
while (decls.length) {
_loop();
}
}
};
return processor;
};
module.exports = exports['default'];<|fim▁end|>
|
return cur + " " + prev;
}).trim();
if (value === '') {
value = defaults[0];
|
<|file_name|>setup_wizard.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe, json, copy
from frappe.utils import cstr, flt, getdate
from frappe import _
from frappe.utils.file_manager import save_file
from frappe.translate import (set_default_language, get_dict,
get_lang_dict, send_translations, get_language_from_code)
from frappe.geo.country_info import get_country_info
from frappe.utils.nestedset import get_root_of
from .default_website import website_maker
import install_fixtures
from .sample_data import make_sample_data
from erpnext.accounts.utils import FiscalYearError
@frappe.whitelist()
def setup_account(args=None):
try:
if frappe.db.sql("select name from tabCompany"):
frappe.throw(_("Setup Already Complete!!"))
if not args:
args = frappe.local.form_dict
if isinstance(args, basestring):
args = json.loads(args)
args = frappe._dict(args)
if args.language and args.language != "english":
set_default_language(args.language)
frappe.clear_cache()
install_fixtures.install(args.get("country"))
update_user_name(args)
frappe.local.message_log = []
create_fiscal_year_and_company(args)
frappe.local.message_log = []
create_users(args)
frappe.local.message_log = []
set_defaults(args)
frappe.local.message_log = []
create_territories()
frappe.local.message_log = []
create_price_lists(args)
frappe.local.message_log = []
create_feed_and_todo()
frappe.local.message_log = []
create_email_digest()
frappe.local.message_log = []
create_letter_head(args)
frappe.local.message_log = []
create_taxes(args)
frappe.local.message_log = []
create_items(args)
frappe.local.message_log = []
create_customers(args)
frappe.local.message_log = []
create_suppliers(args)
frappe.local.message_log = []
frappe.db.set_default('desktop:home_page', 'desktop')
website_maker(args.company_name.strip(), args.company_tagline, args.name)
create_logo(args)
frappe.db.commit()
login_as_first_user(args)
frappe.db.commit()
frappe.clear_cache()
if args.get("add_sample_data"):
try:
make_sample_data()
except FiscalYearError:
pass
except:
if args:
traceback = frappe.get_traceback()
for hook in frappe.get_hooks("setup_wizard_exception"):
frappe.get_attr(hook)(traceback, args)
raise
else:
for hook in frappe.get_hooks("setup_wizard_success"):
frappe.get_attr(hook)(args)
def update_user_name(args):
if args.get("email"):
args['name'] = args.get("email")
frappe.flags.mute_emails = True
doc = frappe.get_doc({
"doctype":"User",
"email": args.get("email"),
"first_name": args.get("first_name"),
"last_name": args.get("last_name")
})
doc.flags.no_welcome_mail = True
doc.insert()
frappe.flags.mute_emails = False
from frappe.auth import _update_password
_update_password(args.get("email"), args.get("password"))
else:
args['name'] = frappe.session.user
# Update User
if not args.get('last_name') or args.get('last_name')=='None':
args['last_name'] = None
frappe.db.sql("""update `tabUser` SET first_name=%(first_name)s,
last_name=%(last_name)s WHERE name=%(name)s""", args)
if args.get("attach_user"):
attach_user = args.get("attach_user").split(",")
if len(attach_user)==3:
filename, filetype, content = attach_user
fileurl = save_file(filename, content, "User", args.get("name"), decode=True).file_url
frappe.db.set_value("User", args.get("name"), "user_image", fileurl)
add_all_roles_to(args.get("name"))
def create_fiscal_year_and_company(args):
curr_fiscal_year = get_fy_details(args.get('fy_start_date'), args.get('fy_end_date'))
frappe.get_doc({
"doctype":"Fiscal Year",
'year': curr_fiscal_year,
'year_start_date': args.get('fy_start_date'),
'year_end_date': args.get('fy_end_date'),
}).insert()
# Company
frappe.get_doc({
"doctype":"Company",
'domain': args.get("industry"),
'company_name':args.get('company_name').strip(),
'abbr':args.get('company_abbr'),
'default_currency':args.get('currency'),
'country': args.get('country'),
'chart_of_accounts': args.get(('chart_of_accounts')),
}).insert()
# Bank Account
args["curr_fiscal_year"] = curr_fiscal_year
def create_price_lists(args):
for pl_type, pl_name in (("Selling", _("Standard Selling")), ("Buying", _("Standard Buying"))):
frappe.get_doc({
"doctype": "Price List",
"price_list_name": pl_name,
"enabled": 1,
"buying": 1 if pl_type == "Buying" else 0,
"selling": 1 if pl_type == "Selling" else 0,
"currency": args["currency"],
"territories": [{
"territory": get_root_of("Territory")
}]
}).insert()
def set_defaults(args):
# enable default currency
frappe.db.set_value("Currency", args.get("currency"), "enabled", 1)
global_defaults = frappe.get_doc("Global Defaults", "Global Defaults")
global_defaults.update({
'current_fiscal_year': args.curr_fiscal_year,
'default_currency': args.get('currency'),
'default_company':args.get('company_name').strip(),
"country": args.get("country"),
})
global_defaults.save()
number_format = get_country_info(args.get("country")).get("number_format", "#,###.##")
# replace these as float number formats, as they have 0 precision
# and are currency number formats and not for floats
if number_format=="#.###":
number_format = "#.###,##"
elif number_format=="#,###":
number_format = "#,###.##"
system_settings = frappe.get_doc("System Settings", "System Settings")
system_settings.update({
"language": args.get("language"),
"time_zone": args.get("timezone"),
"float_precision": 3,
"email_footer_address": args.get("company"),
'date_format': frappe.db.get_value("Country", args.get("country"), "date_format"),
'number_format': number_format,
'enable_scheduler': 1 if not frappe.flags.in_test else 0
})
system_settings.save()
accounts_settings = frappe.get_doc("Accounts Settings")
accounts_settings.auto_accounting_for_stock = 1
accounts_settings.save()
stock_settings = frappe.get_doc("Stock Settings")
stock_settings.item_naming_by = "Item Code"
stock_settings.valuation_method = "FIFO"
stock_settings.stock_uom = _("Nos")
stock_settings.auto_indent = 1
stock_settings.auto_insert_price_list_rate_if_missing = 1
stock_settings.save()
selling_settings = frappe.get_doc("Selling Settings")
selling_settings.cust_master_name = "Customer Name"
selling_settings.so_required = "No"
selling_settings.dn_required = "No"
selling_settings.save()
buying_settings = frappe.get_doc("Buying Settings")
buying_settings.supp_master_name = "Supplier Name"
buying_settings.po_required = "No"
buying_settings.pr_required = "No"
buying_settings.maintain_same_rate = 1
buying_settings.save()
notification_control = frappe.get_doc("Notification Control")
notification_control.quotation = 1
notification_control.sales_invoice = 1
notification_control.purchase_order = 1
notification_control.save()
hr_settings = frappe.get_doc("HR Settings")
hr_settings.emp_created_by = "Naming Series"
hr_settings.save()
def create_feed_and_todo():
"""update Activity feed and create todo for creation of item, customer, vendor"""
frappe.get_doc({
"doctype": "Feed",
"feed_type": "Comment",
"subject": "ERPNext Setup Complete!"
}).insert(ignore_permissions=True)
def create_email_digest():
from frappe.utils.user import get_system_managers
system_managers = get_system_managers(only_name=True)
if not system_managers:
return
companies = frappe.db.sql_list("select name FROM `tabCompany`")
for company in companies:
if not frappe.db.exists("Email Digest", "Default Weekly Digest - " + company):
edigest = frappe.get_doc({
"doctype": "Email Digest",
"name": "Default Weekly Digest - " + company,
"company": company,
"frequency": "Weekly",
"recipient_list": "\n".join(system_managers)
})
for df in edigest.meta.get("fields", {"fieldtype": "Check"}):
if df.fieldname != "scheduler_errors":
edigest.set(df.fieldname, 1)
edigest.insert()
# scheduler errors digest
if companies:
edigest = frappe.new_doc("Email Digest")
edigest.update({
"name": "Scheduler Errors",
"company": companies[0],
"frequency": "Daily",
"recipient_list": "\n".join(system_managers),
"scheduler_errors": 1,
"enabled": 1
})
edigest.insert()
def get_fy_details(fy_start_date, fy_end_date):
start_year = getdate(fy_start_date).year
if start_year == getdate(fy_end_date).year:
fy = cstr(start_year)
else:
fy = cstr(start_year) + '-' + cstr(start_year + 1)
return fy
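# Illustrative examples of get_fy_details (added for clarity, not part of the original file):
#   get_fy_details("2015-04-01", "2016-03-31")  -> "2015-2016"
#   get_fy_details("2015-01-01", "2015-12-31")  -> "2015"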
def create_taxes(args):
for i in xrange(1,6):
if args.get("tax_" + str(i)):
# replace % in case someone also enters the % symbol
tax_rate = (args.get("tax_rate_" + str(i)) or "").replace("%", "")
try:
tax_group = frappe.db.get_value("Account", {"company": args.get("company_name"),
"is_group": 1, "account_type": "Tax", "root_type": "Liability"})
if tax_group:
account = make_tax_head(args, i, tax_group, tax_rate)
make_sales_and_purchase_tax_templates(account)
except frappe.NameError, e:
if e.args[2][0]==1062:
pass
else:
raise
def make_tax_head(args, i, tax_group, tax_rate):
return frappe.get_doc({
"doctype":"Account",<|fim▁hole|> "account_name": args.get("tax_" + str(i)),
"is_group": 0,
"report_type": "Balance Sheet",
"account_type": "Tax",
"tax_rate": flt(tax_rate) if tax_rate else None
}).insert(ignore_permissions=True)
def make_sales_and_purchase_tax_templates(account):
doc = {
"doctype": "Sales Taxes and Charges Template",
"title": account.name,
"taxes": [{
"category": "Valuation and Total",
"charge_type": "On Net Total",
"account_head": account.name,
"description": "{0} @ {1}".format(account.account_name, account.tax_rate),
"rate": account.tax_rate
}]
}
# Sales
frappe.get_doc(copy.deepcopy(doc)).insert()
# Purchase
doc["doctype"] = "Purchase Taxes and Charges Template"
frappe.get_doc(copy.deepcopy(doc)).insert()
def create_items(args):
for i in xrange(1,6):
item = args.get("item_" + str(i))
if item:
item_group = args.get("item_group_" + str(i))
is_sales_item = args.get("is_sales_item_" + str(i))
is_purchase_item = args.get("is_purchase_item_" + str(i))
is_stock_item = item_group!=_("Services")
default_warehouse = ""
if is_stock_item:
default_warehouse = frappe.db.get_value("Warehouse", filters={
"warehouse_name": _("Finished Goods") if is_sales_item else _("Stores"),
"company": args.get("company_name").strip()
})
try:
frappe.get_doc({
"doctype":"Item",
"item_code": item,
"item_name": item,
"description": item,
"is_sales_item": 1 if is_sales_item else 0,
"is_purchase_item": 1 if is_purchase_item else 0,
"show_in_website": 1,
"is_stock_item": is_stock_item and 1 or 0,
"item_group": item_group,
"stock_uom": args.get("item_uom_" + str(i)),
"default_warehouse": default_warehouse
}).insert()
if args.get("item_img_" + str(i)):
item_image = args.get("item_img_" + str(i)).split(",")
if len(item_image)==3:
filename, filetype, content = item_image
fileurl = save_file(filename, content, "Item", item, decode=True).file_url
frappe.db.set_value("Item", item, "image", fileurl)
if args.get("item_price_" + str(i)):
item_price = flt(args.get("item_price_" + str(i)))
if is_sales_item:
price_list_name = frappe.db.get_value("Price List", {"selling": 1})
make_item_price(item, price_list_name, item_price)
if is_purchase_item:
price_list_name = frappe.db.get_value("Price List", {"buying": 1})
make_item_price(item, price_list_name, item_price)
except frappe.NameError:
pass
def make_item_price(item, price_list_name, item_price):
frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list_name,
"item_code": item,
"price_list_rate": item_price
}).insert()
def create_customers(args):
for i in xrange(1,6):
customer = args.get("customer_" + str(i))
if customer:
try:
frappe.get_doc({
"doctype":"Customer",
"customer_name": customer,
"customer_type": "Company",
"customer_group": _("Commercial"),
"territory": args.get("country"),
"company": args.get("company_name").strip()
}).insert()
if args.get("customer_contact_" + str(i)):
create_contact(args.get("customer_contact_" + str(i)),
"customer", customer)
except frappe.NameError:
pass
def create_suppliers(args):
for i in xrange(1,6):
supplier = args.get("supplier_" + str(i))
if supplier:
try:
frappe.get_doc({
"doctype":"Supplier",
"supplier_name": supplier,
"supplier_type": _("Local"),
"company": args.get("company_name").strip()
}).insert()
if args.get("supplier_contact_" + str(i)):
create_contact(args.get("supplier_contact_" + str(i)),
"supplier", supplier)
except frappe.NameError:
pass
def create_contact(contact, party_type, party):
"""Create contact based on given contact name"""
contact = contact.strip().split(" ")
frappe.get_doc({
"doctype":"Contact",
party_type: party,
"first_name":contact[0],
"last_name": len(contact) > 1 and contact[1] or ""
}).insert()
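# Illustrative example (added for clarity, not part of the original file): the hypothetical call
#   create_contact("Jane Doe", "customer", "ACME")
# creates a Contact with first_name "Jane" and last_name "Doe", linked to the party "ACME"
# through the dynamic "customer" field.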
def create_letter_head(args):
if args.get("attach_letterhead"):
frappe.get_doc({
"doctype":"Letter Head",
"letter_head_name": _("Standard"),
"is_default": 1
}).insert()
attach_letterhead = args.get("attach_letterhead").split(",")
if len(attach_letterhead)==3:
filename, filetype, content = attach_letterhead
fileurl = save_file(filename, content, "Letter Head", _("Standard"), decode=True).file_url
frappe.db.set_value("Letter Head", _("Standard"), "content", "<img src='%s' style='max-width: 100%%;'>" % fileurl)
def create_logo(args):
if args.get("attach_logo"):
attach_logo = args.get("attach_logo").split(",")
if len(attach_logo)==3:
filename, filetype, content = attach_logo
fileurl = save_file(filename, content, "Website Settings", "Website Settings",
decode=True).file_url
frappe.db.set_value("Website Settings", "Website Settings", "brand_html",
"<img src='{0}' style='max-width: 40px; max-height: 25px;'> {1}".format(fileurl, args.get("company_name").strip()))
def add_all_roles_to(name):
user = frappe.get_doc("User", name)
for role in frappe.db.sql("""select name from tabRole"""):
if role[0] not in ["Administrator", "Guest", "All", "Customer", "Supplier", "Partner", "Employee"]:
d = user.append("user_roles")
d.role = role[0]
user.save()
def create_territories():
"""create two default territories, one for home country and one named Rest of the World"""
from frappe.utils.nestedset import get_root_of
country = frappe.db.get_default("country")
root_territory = get_root_of("Territory")
for name in (country, _("Rest Of The World")):
if name and not frappe.db.exists("Territory", name):
frappe.get_doc({
"doctype": "Territory",
"territory_name": name.replace("'", ""),
"parent_territory": root_territory,
"is_group": "No"
}).insert()
def login_as_first_user(args):
if args.get("email") and hasattr(frappe.local, "login_manager"):
frappe.local.login_manager.login_as(args.get("email"))
def create_users(args):
# create employee for self
emp = frappe.get_doc({
"doctype": "Employee",
"full_name": " ".join(filter(None, [args.get("first_name"), args.get("last_name")])),
"user_id": frappe.session.user,
"status": "Active",
"company": args.get("company_name")
})
emp.flags.ignore_mandatory = True
emp.insert(ignore_permissions = True)
for i in xrange(1,5):
email = args.get("user_email_" + str(i))
fullname = args.get("user_fullname_" + str(i))
if email:
if not fullname:
fullname = email.split("@")[0]
parts = fullname.split(" ", 1)
user = frappe.get_doc({
"doctype": "User",
"email": email,
"first_name": parts[0],
"last_name": parts[1] if len(parts) > 1 else "",
"enabled": 1,
"user_type": "System User"
})
# default roles
user.append_roles("Projects User", "Stock User", "Support Team")
if args.get("user_sales_" + str(i)):
user.append_roles("Sales User", "Sales Manager", "Accounts User")
if args.get("user_purchaser_" + str(i)):
user.append_roles("Purchase User", "Purchase Manager", "Accounts User")
if args.get("user_accountant_" + str(i)):
user.append_roles("Accounts Manager", "Accounts User")
user.flags.delay_emails = True
if not frappe.db.get_value("User", email):
user.insert(ignore_permissions=True)
# create employee
emp = frappe.get_doc({
"doctype": "Employee",
"full_name": fullname,
"user_id": email,
"status": "Active",
"company": args.get("company_name")
})
emp.flags.ignore_mandatory = True
emp.insert(ignore_permissions = True)
@frappe.whitelist()
def load_messages(language):
frappe.clear_cache()
set_default_language(language)
m = get_dict("page", "setup-wizard")
m.update(get_dict("boot"))
send_translations(m)
return frappe.local.lang
@frappe.whitelist()
def load_languages():
return {
"default_language": get_language_from_code(frappe.local.lang),
"languages": sorted(get_lang_dict().keys())
}<|fim▁end|>
|
"company": args.get("company_name").strip(),
"parent_account": tax_group,
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""
Generic relations
Generic relations let an object have a foreign key to any object through a
content-type/object-id field. A ``GenericForeignKey`` field can point to any
object, be it animal, vegetable, or mineral.
The canonical example is tags (although this example implementation is *far*
from complete).
"""
from __future__ import unicode_literals
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class TaggedItem(models.Model):
"""A tag on an item."""
tag = models.SlugField()
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey()
class Meta:
ordering = ["tag", "content_type__model"]
def __str__(self):
return self.tag
class ValuableTaggedItem(TaggedItem):
value = models.PositiveIntegerField()
class AbstractComparison(models.Model):
comparative = models.CharField(max_length=50)
content_type1 = models.ForeignKey(ContentType, models.CASCADE, related_name="comparative1_set")
object_id1 = models.PositiveIntegerField()
first_obj = GenericForeignKey(ct_field="content_type1", fk_field="object_id1")
@python_2_unicode_compatible
class Comparison(AbstractComparison):
"""
A model that tests having multiple GenericForeignKeys. One is defined
through an inherited abstract model and one defined directly on this class.
"""
content_type2 = models.ForeignKey(ContentType, models.CASCADE, related_name="comparative2_set")
object_id2 = models.PositiveIntegerField()
other_obj = GenericForeignKey(ct_field="content_type2", fk_field="object_id2")
def __str__(self):
return "%s is %s than %s" % (self.first_obj, self.comparative, self.other_obj)
@python_2_unicode_compatible
class Animal(models.Model):
common_name = models.CharField(max_length=150)
latin_name = models.CharField(max_length=150)
tags = GenericRelation(TaggedItem, related_query_name='animal')
comparisons = GenericRelation(Comparison,
object_id_field="object_id1",
content_type_field="content_type1")
def __str__(self):
return self.common_name
@python_2_unicode_compatible
class Vegetable(models.Model):
name = models.CharField(max_length=150)
is_yucky = models.BooleanField(default=True)
tags = GenericRelation(TaggedItem)
def __str__(self):
return self.name
<|fim▁hole|>@python_2_unicode_compatible
class Mineral(models.Model):
name = models.CharField(max_length=150)
hardness = models.PositiveSmallIntegerField()
# note the lack of an explicit GenericRelation here...
def __str__(self):
return self.name
class GeckoManager(models.Manager):
def get_queryset(self):
return super(GeckoManager, self).get_queryset().filter(has_tail=True)
class Gecko(models.Model):
has_tail = models.BooleanField(default=False)
objects = GeckoManager()
# To test fix for #11263
class Rock(Mineral):
tags = GenericRelation(TaggedItem)
class ManualPK(models.Model):
id = models.IntegerField(primary_key=True)
tags = GenericRelation(TaggedItem, related_query_name='manualpk')
class ForProxyModelModel(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
obj = GenericForeignKey(for_concrete_model=False)
title = models.CharField(max_length=255, null=True)
class ForConcreteModelModel(models.Model):
content_type = models.ForeignKey(ContentType, models.CASCADE)
object_id = models.PositiveIntegerField()
obj = GenericForeignKey()
class ConcreteRelatedModel(models.Model):
bases = GenericRelation(ForProxyModelModel, for_concrete_model=False)
class ProxyRelatedModel(ConcreteRelatedModel):
class Meta:
proxy = True
# To test fix for #7551
class AllowsNullGFK(models.Model):
content_type = models.ForeignKey(ContentType, models.SET_NULL, null=True)
object_id = models.PositiveIntegerField(null=True)
content_object = GenericForeignKey()<|fim▁end|>
| |
<|file_name|>4986e64643f4_.py<|end_file_name|><|fim▁begin|>"""empty message
Revision ID: 4986e64643f4
Revises: 175003d01257
Create Date: 2015-04-15 12:16:41.965765
"""
# revision identifiers, used by Alembic.
revision = '4986e64643f4'<|fim▁hole|>down_revision = '175003d01257'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('tile', sa.Column('feature_id', sa.String(), nullable=False))
op.alter_column('tile', 'date_acquired',
existing_type=postgresql.TIMESTAMP(),
nullable=False)
op.create_unique_constraint(None, 'tile', ['feature_id'])
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'tile', type_='unique')
op.alter_column('tile', 'date_acquired',
existing_type=postgresql.TIMESTAMP(),
nullable=True)
op.drop_column('tile', 'feature_id')
### end Alembic commands ###<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from psyrun.backend import DistributeBackend, LoadBalancingBackend
from psyrun.mapper import (<|fim▁hole|> map_pspace_hdd_backed)
from psyrun.pspace import Param
from psyrun.scheduler import ImmediateRun, Sqsub
from psyrun.store import DefaultStore, PickleStore
from psyrun.version import version as __version__<|fim▁end|>
|
map_pspace,
map_pspace_parallel,
|
<|file_name|>TextToBinary.PipeStorage.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2016 Google
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and/or associated documentation files (the
// "Materials"), to deal in the Materials without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Materials, and to
// permit persons to whom the Materials are furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Materials.
//
// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
// KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
// SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
// https://www.khronos.org/registry/
//
// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
// IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
// CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
// TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
// MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
#include "TestFixture.h"
#include "gmock/gmock.h"
namespace {
using ::spvtest::MakeInstruction;
using ::testing::Eq;
using OpTypePipeStorageTest = spvtest::TextToBinaryTest;
TEST_F(OpTypePipeStorageTest, OpcodeUnrecognizedInV10) {
EXPECT_THAT(CompileFailure("%res = OpTypePipeStorage", SPV_ENV_UNIVERSAL_1_0),
Eq("Invalid Opcode name 'OpTypePipeStorage'"));
}
TEST_F(OpTypePipeStorageTest, ArgumentCount) {<|fim▁hole|> EXPECT_THAT(
CompiledInstructions("%res = OpTypePipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq(MakeInstruction(SpvOpTypePipeStorage, {1})));
EXPECT_THAT(CompileFailure("%res = OpTypePipeStorage %1 %2 %3 %4 %5",
SPV_ENV_UNIVERSAL_1_1),
Eq("'=' expected after result id."));
}
using OpConstantPipeStorageTest = spvtest::TextToBinaryTest;
TEST_F(OpConstantPipeStorageTest, OpcodeUnrecognizedInV10) {
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 3 4 5",
SPV_ENV_UNIVERSAL_1_0),
Eq("Invalid Opcode name 'OpConstantPipeStorage'"));
}
TEST_F(OpConstantPipeStorageTest, ArgumentCount) {
EXPECT_THAT(
CompileFailure("OpConstantPipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq("Expected <result-id> at the beginning of an instruction, found "
"'OpConstantPipeStorage'."));
EXPECT_THAT(
CompileFailure("%1 = OpConstantPipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq("Expected operand, found end of stream."));
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 3 4",
SPV_ENV_UNIVERSAL_1_1),
Eq("Expected operand, found end of stream."));
EXPECT_THAT(CompiledInstructions("%1 = OpConstantPipeStorage %2 3 4 5",
SPV_ENV_UNIVERSAL_1_1),
Eq(MakeInstruction(SpvOpConstantPipeStorage, {1, 2, 3, 4, 5})));
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 3 4 5 %6 %7",
SPV_ENV_UNIVERSAL_1_1),
Eq("'=' expected after result id."));
}
TEST_F(OpConstantPipeStorageTest, ArgumentTypes) {
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 %3 4 5",
SPV_ENV_UNIVERSAL_1_1),
Eq("Invalid unsigned integer literal: %3"));
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 3 %4 5",
SPV_ENV_UNIVERSAL_1_1),
Eq("Invalid unsigned integer literal: %4"));
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage 2 3 4 5",
SPV_ENV_UNIVERSAL_1_1),
Eq("Expected id to start with %."));
EXPECT_THAT(CompileFailure("%1 = OpConstantPipeStorage %2 3 4 \"ab\"",
SPV_ENV_UNIVERSAL_1_1),
Eq("Invalid unsigned integer literal: \"ab\""));
}
using OpCreatePipeFromPipeStorageTest = spvtest::TextToBinaryTest;
TEST_F(OpCreatePipeFromPipeStorageTest, OpcodeUnrecognizedInV10) {
EXPECT_THAT(CompileFailure("%1 = OpCreatePipeFromPipeStorage %2 %3",
SPV_ENV_UNIVERSAL_1_0),
Eq("Invalid Opcode name 'OpCreatePipeFromPipeStorage'"));
}
TEST_F(OpCreatePipeFromPipeStorageTest, ArgumentCount) {
EXPECT_THAT(
CompileFailure("OpCreatePipeFromPipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq("Expected <result-id> at the beginning of an instruction, found "
"'OpCreatePipeFromPipeStorage'."));
EXPECT_THAT(
CompileFailure("%1 = OpCreatePipeFromPipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq("Expected operand, found end of stream."));
EXPECT_THAT(CompileFailure("%1 = OpCreatePipeFromPipeStorage %2 OpNop",
SPV_ENV_UNIVERSAL_1_1),
Eq("Expected operand, found next instruction instead."));
EXPECT_THAT(CompiledInstructions("%1 = OpCreatePipeFromPipeStorage %2 %3",
SPV_ENV_UNIVERSAL_1_1),
Eq(MakeInstruction(SpvOpCreatePipeFromPipeStorage, {1, 2, 3})));
EXPECT_THAT(CompileFailure("%1 = OpCreatePipeFromPipeStorage %2 %3 %4 %5",
SPV_ENV_UNIVERSAL_1_1),
Eq("'=' expected after result id."));
}
TEST_F(OpCreatePipeFromPipeStorageTest, ArgumentTypes) {
EXPECT_THAT(CompileFailure("%1 = OpCreatePipeFromPipeStorage \"\" %3",
SPV_ENV_UNIVERSAL_1_1),
Eq("Expected id to start with %."));
EXPECT_THAT(CompileFailure("%1 = OpCreatePipeFromPipeStorage %2 3",
SPV_ENV_UNIVERSAL_1_1),
Eq("Expected id to start with %."));
}
} // anonymous namespace<|fim▁end|>
|
EXPECT_THAT(
CompileFailure("OpTypePipeStorage", SPV_ENV_UNIVERSAL_1_1),
Eq("Expected <result-id> at the beginning of an instruction, found "
"'OpTypePipeStorage'."));
|
<|file_name|>python_tests.py<|end_file_name|><|fim▁begin|># ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from twitter.common.quantity import Amount, Time
from twitter.pants.targets.python_target import PythonTarget
<|fim▁hole|> def __init__(self, name, sources, resources=None, dependencies=None,
timeout=Amount(2, Time.MINUTES),
soft_dependencies=False):
"""
name / sources / resources / dependencies: See PythonLibrary target
timeout: Amount of time before this test should be considered timed-out
[Default: 2 minutes]
soft_dependencies: Whether or not we should ignore dependency resolution
errors for this test. [Default: False]
"""
self._timeout = timeout
self._soft_dependencies = bool(soft_dependencies)
PythonTarget.__init__(self, name, sources, resources, dependencies)
self.add_label('python')
self.add_label('tests')
@property
def timeout(self):
return self._timeout
class PythonTestSuite(PythonTarget):
def __init__(self, name, dependencies=None):
PythonTarget.__init__(self, name, (), (), dependencies)<|fim▁end|>
|
class PythonTests(PythonTarget):
|
<|file_name|>send_serial.rs<|end_file_name|><|fim▁begin|>use printspool_machine::components::ControllerConfig;
use super::*;
use crate::gcode_parser::parse_gcode;
pub fn send_serial(
effects: &mut Vec<Effect>,
gcode_line: GCodeLine,
context: &mut Context,
is_polling: bool,
) {
// Allow for a byte of spacing between receiving and sending over the serial port
// The choice of 1 byte was arbitrary but sending without a spin lock seems to
    // lose GCodes.
// let seconds_per_bit: u64 = (60 * 1000 * 1000 / context.baud_rate).into();
// spin_sleep::sleep(Duration::from_micros(8 * seconds_per_bit));<|fim▁hole|>
let parser_result = parse_gcode(&gcode_line.gcode, context)
.map_err(|err| warn!("{}", err));
let ControllerConfig {
long_running_code_timeout,
fast_code_timeout,
long_running_codes,
blocking_codes,
..
} = &context.controller.model;
let mut duration = fast_code_timeout;
let mut is_blocking = false;
if let Ok(Some((mnemonic, major_number))) = parser_result {
let gcode_macro = format!("{}{}", mnemonic, major_number);
if long_running_codes.contains(&gcode_macro) {
duration = long_running_code_timeout
}
is_blocking = blocking_codes.contains(&gcode_macro)
};
effects.push(Effect::SendSerial(gcode_line));
if is_blocking {
effects.push(
Effect::CancelDelay { key: "tickle_delay".to_string() }
);
} else {
effects.push(
Effect::Delay {
key: "tickle_delay".to_string(),
// TODO: configurable delayFromGreetingToReady
duration: Duration::from_millis(*duration),
event: TickleSerialPort,
},
);
}
}<|fim▁end|>
|
// eprintln!("TX: {:?}", gcode_line.gcode);
context.push_gcode_tx(gcode_line.gcode.clone(), is_polling);
|
<|file_name|>bulkimport_users_common.py<|end_file_name|><|fim▁begin|>import re
from crispy_forms import layout
from django import forms
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.http import HttpResponseRedirect, Http404
from django.utils.translation import gettext_lazy
from cradmin_legacy.crispylayouts import PrimarySubmit
from cradmin_legacy.viewhelpers import formbase
from devilry.devilry_account.models import PermissionGroup
class AbstractTypeInUsersView(formbase.FormView):
users_blob_split_pattern = re.compile(r'[,;\s]+')
create_button_label = gettext_lazy('Save')
template_name = 'devilry_admin/common/abstract-type-in-users.django.html'
def dispatch(self, request, *args, **kwargs):
requestuser_devilryrole = request.cradmin_instance.get_devilryrole_for_requestuser()
if requestuser_devilryrole != PermissionGroup.GROUPTYPE_DEPARTMENTADMIN:
raise Http404()
return super(AbstractTypeInUsersView, self).dispatch(request=request, *args, **kwargs)
def get_backlink_url(self):
raise NotImplementedError()
def get_backlink_label(self):
raise NotImplementedError()
@classmethod
def split_users_blob(cls, users_blob):
"""
        Split the given string of users by ``,``, ``;`` and whitespace.
Returns a set.
"""
users_blob_split = cls.users_blob_split_pattern.split(users_blob)
if len(users_blob_split) == 0:
return []
if users_blob_split[0] == '':
del users_blob_split[0]
if len(users_blob_split) > 0 and users_blob_split[-1] == '':
del users_blob_split[-1]
return set(users_blob_split)
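    # Illustrative example (added for clarity, not part of the original source):
    #   split_users_blob("[email protected], [email protected];[email protected]\[email protected]")
    #   -> {'[email protected]', '[email protected]', '[email protected]', '[email protected]'}
    # Commas, semicolons and any whitespace (including newlines) all act as separators.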
def __get_users_blob_help_text(self):
if settings.CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND:
return gettext_lazy('Type or paste in email addresses separated by comma (","), space or one user on each line.')
else:
return gettext_lazy('Type or paste in usernames separated by comma (","), space or one user on each line.')
def __get_users_blob_placeholder(self):
if settings.CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND:
return gettext_lazy('[email protected]\[email protected]')
else:
return gettext_lazy('jane\njohn')
def get_form_class(self):
users_blob_help_text = self.__get_users_blob_help_text()
class UserImportForm(forms.Form):
users_blob = forms.CharField(
widget=forms.Textarea,
required=True,
help_text=users_blob_help_text
)
def __validate_users_blob_emails(self, emails):
invalid_emails = []
for email in emails:
try:
validate_email(email)
except ValidationError:
invalid_emails.append(email)
if invalid_emails:
self.add_error(
'users_blob',
gettext_lazy('Invalid email addresses: %(emails)s') % {
'emails': ', '.join(sorted(invalid_emails))
}
)
def __validate_users_blob_usernames(self, usernames):
valid_username_pattern = re.compile(
getattr(settings, 'DEVILRY_VALID_USERNAME_PATTERN', r'^[a-z0-9]+$'))
invalid_usernames = []
for username in usernames:
if not valid_username_pattern.match(username):
invalid_usernames.append(username)
if invalid_usernames:
self.add_error(
'users_blob',
gettext_lazy('Invalid usernames: %(usernames)s') % {
'usernames': ', '.join(sorted(invalid_usernames))
}
)
def clean(self):
cleaned_data = super(UserImportForm, self).clean()
users_blob = cleaned_data.get('users_blob', None)
if users_blob:
users = AbstractTypeInUsersView.split_users_blob(users_blob)
if settings.CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND:
self.__validate_users_blob_emails(emails=users)
else:
self.__validate_users_blob_usernames(usernames=users)
self.cleaned_users_set = users
return UserImportForm
def get_field_layout(self):
return [
layout.Div(
layout.Field('users_blob', placeholder=self.__get_users_blob_placeholder()),
css_class='cradmin-globalfields cradmin-legacy-formfield-label-sr-only')
]
def get_buttons(self):
return [
PrimarySubmit('save', self.create_button_label),
]
def get_success_url(self):
return self.request.cradmin_app.reverse_appindexurl()
def import_users_from_emails(self, emails):
raise NotImplementedError()
def import_users_from_usernames(self, usernames):
raise NotImplementedError()
def form_valid(self, form):
if settings.CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND:
self.import_users_from_emails(emails=form.cleaned_users_set)
else:
self.import_users_from_usernames(usernames=form.cleaned_users_set)
return HttpResponseRedirect(str(self.get_success_url()))
def get_context_data(self, **kwargs):
context = super(AbstractTypeInUsersView, self).get_context_data(**kwargs)
context['backlink_url'] = self.get_backlink_url()
context['backlink_label'] = self.get_backlink_label()<|fim▁hole|> return context<|fim▁end|>
|
context['uses_email_auth_backend'] = settings.CRADMIN_LEGACY_USE_EMAIL_AUTH_BACKEND
|
<|file_name|>pagtn.py<|end_file_name|><|fim▁begin|>"""
DGL-based PAGTN for graph property prediction.
"""
import torch.nn as nn
import torch.nn.functional as F
from deepchem.models.losses import Loss, L2Loss, SparseSoftmaxCrossEntropy
from deepchem.models.torch_models.torch_model import TorchModel
class Pagtn(nn.Module):
"""Model for Graph Property Prediction
This model proceeds as follows:
* Update node representations in graphs with a variant of GAT, where a
linear additive form of attention is applied. Attention Weights are derived
by concatenating the node and edge features for each bond.
* Update node representations with multiple rounds of message passing.
  * Each layer has residual connections with its previous layer.
* The final molecular representation is computed by combining the representations
of all nodes in the molecule.
* Perform the final prediction using a linear layer
Examples
--------
>>> import deepchem as dc
>>> import dgl
>>> from deepchem.models import Pagtn
>>> smiles = ["C1CCC1", "C1=CC=CN=C1"]
>>> featurizer = dc.feat.PagtnMolGraphFeaturizer(max_length=5)
>>> graphs = featurizer.featurize(smiles)
>>> print(type(graphs[0]))
<class 'deepchem.feat.graph_data.GraphData'>
>>> dgl_graphs = [graphs[i].to_dgl_graph() for i in range(len(graphs))]
>>> batch_dgl_graph = dgl.batch(dgl_graphs)
>>> model = Pagtn(n_tasks=1, mode='regression')
>>> preds = model(batch_dgl_graph)
>>> print(type(preds))
<class 'torch.Tensor'>
>>> preds.shape == (2, 1)
True
References
----------
.. [1] Benson Chen, Regina Barzilay, Tommi Jaakkola. "Path-Augmented
Graph Transformer Network." arXiv:1905.12712
Notes
-----
This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci
(https://github.com/awslabs/dgl-lifesci) to be installed.
"""
def __init__(self,
n_tasks: int,
number_atom_features: int = 94,
number_bond_features: int = 42,
mode: str = 'regression',
n_classes: int = 2,
output_node_features: int = 256,
hidden_features: int = 32,
num_layers: int = 5,
num_heads: int = 1,
dropout: float = 0.1,
nfeat_name: str = 'x',
efeat_name: str = 'edge_attr',
pool_mode: str = 'sum'):
"""<|fim▁hole|> ----------
n_tasks: int
Number of tasks.
number_atom_features : int
Size for the input node features. Default to 94.
number_bond_features : int
Size for the input edge features. Default to 42.
mode: str
The model type, 'classification' or 'regression'. Default to 'regression'.
n_classes: int
The number of classes to predict per task
(only used when ``mode`` is 'classification'). Default to 2.
output_node_features : int
Size for the output node features in PAGTN layers. Default to 256.
hidden_features : int
Size for the hidden node features in PAGTN layers. Default to 32.
num_layers : int
Number of PAGTN layers to be applied. Default to 5.
num_heads : int
Number of attention heads. Default to 1.
dropout : float
The probability for performing dropout. Default to 0.1
nfeat_name: str
For an input graph ``g``, the model assumes that it stores node features in
``g.ndata[nfeat_name]`` and will retrieve input node features from that.
Default to 'x'.
efeat_name: str
For an input graph ``g``, the model assumes that it stores edge features in
``g.edata[efeat_name]`` and will retrieve input edge features from that.
Default to 'edge_attr'.
pool_mode : 'max' or 'mean' or 'sum'
Whether to compute elementwise maximum, mean or sum of the node representations.
"""
try:
import dgl
except:
raise ImportError('This class requires dgl.')
try:
import dgllife
except:
raise ImportError('This class requires dgllife.')
if mode not in ['classification', 'regression']:
raise ValueError("mode must be either 'classification' or 'regression'")
super(Pagtn, self).__init__()
self.n_tasks = n_tasks
self.mode = mode
self.n_classes = n_classes
self.nfeat_name = nfeat_name
self.efeat_name = efeat_name
if mode == 'classification':
out_size = n_tasks * n_classes
else:
out_size = n_tasks
from dgllife.model import PAGTNPredictor as DGLPAGTNPredictor
self.model = DGLPAGTNPredictor(
node_in_feats=number_atom_features,
node_out_feats=output_node_features,
node_hid_feats=hidden_features,
edge_feats=number_bond_features,
depth=num_layers,
nheads=num_heads,
dropout=dropout,
n_tasks=out_size,
mode=pool_mode)
def forward(self, g):
"""Predict graph labels
Parameters
----------
g: DGLGraph
A DGLGraph for a batch of graphs. It stores the node features in
``dgl_graph.ndata[self.nfeat_name]`` and edge features in
``dgl_graph.edata[self.efeat_name]``.
Returns
-------
torch.Tensor
The model output.
* When self.mode = 'regression',
its shape will be ``(dgl_graph.batch_size, self.n_tasks)``.
* When self.mode = 'classification', the output consists of probabilities
for classes. Its shape will be
``(dgl_graph.batch_size, self.n_tasks, self.n_classes)`` if self.n_tasks > 1;
its shape will be ``(dgl_graph.batch_size, self.n_classes)`` if self.n_tasks is 1.
torch.Tensor, optional
      This is only returned when self.mode = 'classification'; the output consists of the
logits for classes before softmax.
"""
node_feats = g.ndata[self.nfeat_name]
edge_feats = g.edata[self.efeat_name]
out = self.model(g, node_feats, edge_feats)
if self.mode == 'classification':
if self.n_tasks == 1:
logits = out.view(-1, self.n_classes)
softmax_dim = 1
else:
logits = out.view(-1, self.n_tasks, self.n_classes)
softmax_dim = 2
proba = F.softmax(logits, dim=softmax_dim)
return proba, logits
else:
return out
class PagtnModel(TorchModel):
"""Model for Graph Property Prediction.
This model proceeds as follows:
* Update node representations in graphs with a variant of GAT, where a
linear additive form of attention is applied. Attention Weights are derived
by concatenating the node and edge features for each bond.
* Update node representations with multiple rounds of message passing.
  * Each layer has residual connections with its previous layer.
* The final molecular representation is computed by combining the representations
of all nodes in the molecule.
* Perform the final prediction using a linear layer
Examples
--------
>>> import deepchem as dc
>>> from deepchem.models import PagtnModel
>>> # preparing dataset
>>> smiles = ["C1CCC1", "CCC"]
>>> labels = [0., 1.]
>>> featurizer = dc.feat.PagtnMolGraphFeaturizer(max_length=5)
>>> X = featurizer.featurize(smiles)
>>> dataset = dc.data.NumpyDataset(X=X, y=labels)
>>> # training model
>>> model = PagtnModel(mode='classification', n_tasks=1,
... batch_size=16, learning_rate=0.001)
>>> loss = model.fit(dataset, nb_epoch=5)
References
----------
.. [1] Benson Chen, Regina Barzilay, Tommi Jaakkola. "Path-Augmented
Graph Transformer Network." arXiv:1905.12712
Notes
-----
This class requires DGL (https://github.com/dmlc/dgl) and DGL-LifeSci
(https://github.com/awslabs/dgl-lifesci) to be installed.
"""
def __init__(self,
n_tasks: int,
number_atom_features: int = 94,
number_bond_features: int = 42,
mode: str = 'regression',
n_classes: int = 2,
output_node_features: int = 256,
hidden_features: int = 32,
num_layers: int = 5,
num_heads: int = 1,
dropout: float = 0.1,
pool_mode: str = 'sum',
**kwargs):
"""
Parameters
----------
n_tasks: int
Number of tasks.
number_atom_features : int
Size for the input node features. Default to 94.
number_bond_features : int
Size for the input edge features. Default to 42.
mode: str
The model type, 'classification' or 'regression'. Default to 'regression'.
n_classes: int
The number of classes to predict per task
(only used when ``mode`` is 'classification'). Default to 2.
output_node_features : int
Size for the output node features in PAGTN layers. Default to 256.
hidden_features : int
Size for the hidden node features in PAGTN layers. Default to 32.
num_layers: int
Number of graph neural network layers, i.e. number of rounds of message passing.
      Default to 5.
num_heads : int
Number of attention heads. Default to 1.
dropout: float
Dropout probability. Default to 0.1
pool_mode : 'max' or 'mean' or 'sum'
Whether to compute elementwise maximum, mean or sum of the node representations.
kwargs
This can include any keyword argument of TorchModel.
"""
model = Pagtn(
n_tasks=n_tasks,
number_atom_features=number_atom_features,
number_bond_features=number_bond_features,
mode=mode,
n_classes=n_classes,
output_node_features=output_node_features,
hidden_features=hidden_features,
num_layers=num_layers,
num_heads=num_heads,
dropout=dropout,
pool_mode=pool_mode)
if mode == 'regression':
loss: Loss = L2Loss()
output_types = ['prediction']
else:
loss = SparseSoftmaxCrossEntropy()
output_types = ['prediction', 'loss']
super(PagtnModel, self).__init__(
model, loss=loss, output_types=output_types, **kwargs)
def _prepare_batch(self, batch):
"""Create batch data for Pagtn.
Parameters
----------
batch: tuple
The tuple is ``(inputs, labels, weights)``.
Returns
-------
inputs: DGLGraph
DGLGraph for a batch of graphs.
labels: list of torch.Tensor or None
The graph labels.
weights: list of torch.Tensor or None
The weights for each sample or sample/task pair converted to torch.Tensor.
"""
try:
import dgl
except:
raise ImportError('This class requires dgl.')
inputs, labels, weights = batch
dgl_graphs = [graph.to_dgl_graph() for graph in inputs[0]]
inputs = dgl.batch(dgl_graphs).to(self.device)
_, labels, weights = super(PagtnModel, self)._prepare_batch(([], labels,
weights))
return inputs, labels, weights<|fim▁end|>
|
Parameters
|
<|file_name|>scripts-parse.py<|end_file_name|><|fim▁begin|>import sys
from unicode_parse_common import *
# http://www.unicode.org/Public/5.1.0/ucd/Scripts.txt
script_to_harfbuzz = {
# This is the list of HB_Script_* at the time of writing
'Common': 'HB_Script_Common',
'Greek': 'HB_Script_Greek',
'Cyrillic': 'HB_Script_Cyrillic',
'Armenian': 'HB_Script_Armenian',
'Hebrew': 'HB_Script_Hebrew',
'Arabic': 'HB_Script_Arabic',
'Syriac': 'HB_Script_Syriac',
'Thaana': 'HB_Script_Thaana',
'Devanagari': 'HB_Script_Devanagari',
'Bengali': 'HB_Script_Bengali',
'Gurmukhi': 'HB_Script_Gurmukhi',
'Gujarati': 'HB_Script_Gujarati',
'Oriya': 'HB_Script_Oriya',
'Tamil': 'HB_Script_Tamil',
'Telugu': 'HB_Script_Telugu',
'Kannada': 'HB_Script_Kannada',
'Malayalam': 'HB_Script_Malayalam',
'Sinhala': 'HB_Script_Sinhala',
'Thai': 'HB_Script_Thai',
'Lao': 'HB_Script_Lao',
'Tibetan': 'HB_Script_Tibetan',
'Myanmar': 'HB_Script_Myanmar',
'Georgian': 'HB_Script_Georgian',
'Hangul': 'HB_Script_Hangul',
'Ogham': 'HB_Script_Ogham',
'Runic': 'HB_Script_Runic',
'Khmer': 'HB_Script_Khmer',
'Inherited': 'HB_Script_Inherited',
}
class ScriptDict(object):
def __init__(self, base):
self.base = base
def __getitem__(self, key):
r = self.base.get(key, None)
if r is None:
return 'HB_Script_Common'
return r<|fim▁hole|>def main(infile, outfile):
ranges = unicode_file_parse(infile,
ScriptDict(script_to_harfbuzz),
'HB_Script_Common')
ranges = sort_and_merge(ranges)
print >>outfile, '// Generated from Unicode script tables\n'
print >>outfile, '#ifndef SCRIPT_PROPERTIES_H_'
print >>outfile, '#define SCRIPT_PROPERTIES_H_\n'
print >>outfile, '#include <stdint.h>'
print >>outfile, '#include "harfbuzz-shaper.h"\n'
print >>outfile, 'struct script_property {'
print >>outfile, ' uint32_t range_start;'
print >>outfile, ' uint32_t range_end;'
print >>outfile, ' HB_Script script;'
print >>outfile, '};\n'
print >>outfile, 'static const struct script_property script_properties[] = {'
for (start, end, value) in ranges:
print >>outfile, ' {0x%x, 0x%x, %s},' % (start, end, value)
print >>outfile, '};\n'
print >>outfile, 'static const unsigned script_properties_count = %d;\n' % len(ranges)
print >>outfile, '#endif // SCRIPT_PROPERTIES_H_'
if __name__ == '__main__':
if len(sys.argv) != 3:
print 'Usage: %s <input .txt> <output .h>' % sys.argv[0]
else:
main(file(sys.argv[1], 'r'), file(sys.argv[2], 'w+'))<|fim▁end|>
| |
<|file_name|>test_bdist_pex.py<|end_file_name|><|fim▁begin|># Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import subprocess
import sys
from textwrap import dedent
from twitter.common.contextutil import pushd
from pex.testing import temporary_content
def assert_entry_points(entry_points):
setup_py = dedent("""
from setuptools import setup
<|fim▁hole|> name='my_app',
version='0.0.0',
zip_safe=True,
packages=[''],
entry_points=%(entry_points)r,
)
""" % dict(entry_points=entry_points))
my_app = dedent("""
def do_something():
print("hello world!")
""")
with temporary_content({'setup.py': setup_py, 'my_app.py': my_app}) as project_dir:
with pushd(project_dir):
subprocess.check_call([sys.executable, 'setup.py', 'bdist_pex'])
process = subprocess.Popen([os.path.join(project_dir, 'dist', 'my_app-0.0.0.pex')],
stdout=subprocess.PIPE)
stdout, _ = process.communicate()
assert 0 == process.returncode
assert stdout == b'hello world!\n'
def test_entry_points_dict():
assert_entry_points({'console_scripts': ['my_app = my_app:do_something']})
def test_entry_points_ini_string():
assert_entry_points(dedent("""
[console_scripts]
my_app=my_app:do_something
"""))<|fim▁end|>
|
setup(
|
<|file_name|>nodes.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use std::ops::Deref;
// self
use crate::geom::*;
use super::attributes::*;
// TODO: implement Default for all
/// Node's kind.
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub enum NodeKind {
Svg(Svg),
Defs,
LinearGradient(LinearGradient),
RadialGradient(RadialGradient),
ClipPath(ClipPath),
Mask(Mask),
Pattern(Pattern),
Filter(Filter),
Path(Path),
Text(Text),
Image(Image),
Group(Group),
}
impl NodeKind {
/// Returns node's ID.
///
/// If a current node doesn't support ID - an empty string
/// will be returned.
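    ///
    /// For example (illustrative): `NodeKind::Defs.id()` yields `""`, while a
    /// `NodeKind::Path` returns the `id` of the wrapped `Path`.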
pub fn id(&self) -> &str {
match *self {
NodeKind::Svg(_) => "",
NodeKind::Defs => "",
NodeKind::LinearGradient(ref e) => e.id.as_str(),
NodeKind::RadialGradient(ref e) => e.id.as_str(),
NodeKind::ClipPath(ref e) => e.id.as_str(),
NodeKind::Mask(ref e) => e.id.as_str(),
NodeKind::Pattern(ref e) => e.id.as_str(),
NodeKind::Filter(ref e) => e.id.as_str(),
NodeKind::Path(ref e) => e.id.as_str(),
NodeKind::Text(ref e) => e.id.as_str(),
NodeKind::Image(ref e) => e.id.as_str(),
NodeKind::Group(ref e) => e.id.as_str(),
}
}
/// Returns node's transform.
///
/// If a current node doesn't support transformation - a default
/// transform will be returned.
pub fn transform(&self) -> Transform {
match *self {
NodeKind::Svg(_) => Transform::default(),
NodeKind::Defs => Transform::default(),
NodeKind::LinearGradient(ref e) => e.transform,
NodeKind::RadialGradient(ref e) => e.transform,
NodeKind::ClipPath(ref e) => e.transform,
NodeKind::Mask(_) => Transform::default(),
NodeKind::Pattern(ref e) => e.transform,
NodeKind::Filter(_) => Transform::default(),
NodeKind::Path(ref e) => e.transform,
NodeKind::Text(ref e) => e.transform,
NodeKind::Image(ref e) => e.transform,
NodeKind::Group(ref e) => e.transform,
}
}
}
/// An SVG root element.
#[derive(Clone, Copy, Debug)]
pub struct Svg {
/// Image size.
///
/// Size of an image that should be created to fit the SVG.
///
/// `width` and `height` in SVG.
pub size: Size,
/// SVG viewbox.
///
/// Specifies which part of the SVG image should be rendered.
///
/// `viewBox` and `preserveAspectRatio` in SVG.
pub view_box: ViewBox,
}
/// A path element.
#[derive(Clone, Debug)]
pub struct Path {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Isn't automatically generated.
/// Can be empty.
pub id: String,
/// Element transform.
pub transform: Transform,
/// Element visibility.
pub visibility: Visibility,
/// Fill style.
pub fill: Option<Fill>,
/// Stroke style.
pub stroke: Option<Stroke>,
/// Rendering mode.
///
/// `shape-rendering` in SVG.
pub rendering_mode: ShapeRendering,
/// Segments list.
///
/// All segments are in absolute coordinates.
pub segments: Vec<PathSegment>,
}
impl Default for Path {
fn default() -> Self {
Path {
id: String::new(),
transform: Transform::default(),
visibility: Visibility::Visible,
fill: None,
stroke: None,
rendering_mode: ShapeRendering::default(),
segments: Vec::new(),
}
}
}
/// A text element.
///
/// `text` element in SVG.
#[derive(Clone, Debug)]
pub struct Text {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Isn't automatically generated.
/// Can be empty.
pub id: String,
/// Element transform.
pub transform: Transform,
/// Rotate list.
///
/// If set, contains a list of rotation angles for each *code point* in the `text`.
pub rotate: Option<Vec<f64>>,
/// Rendering mode.
///
/// `text-rendering` in SVG.
pub rendering_mode: TextRendering,
/// A list of text chunks.
pub chunks: Vec<TextChunk>,
}
/// A text chunk.
///
/// Contains position and anchor of the next
/// [text chunk](https://www.w3.org/TR/SVG11/text.html#TextChunk).
///
/// Isn't represented in SVG directly. Usually, it's the first `tspan` or text node
/// and any `tspan` that defines either `x` or `y` coordinates.
///
/// *Note:* `usvg` text chunk isn't strictly the same as an SVG one,
/// because text chunk should be defined only by `x` or `y` coordinates.
/// But since `resvg` backends don't have direct access to glyphs/graphemes
/// we have to create chunks on `dx`, `dy` and non-zero `rotate` to simulate this.
/// This is incorrect, since it breaks text shaping
/// and BIDI reordering (resvg doesn't support this one anyway).
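///
/// For example (an illustrative sketch, not from the original docs): in
/// `<text x="10">ab<tspan dy="5">cd</tspan></text>` the `tspan` sets `dy`,
/// so usvg produces two text chunks even though strict SVG would define only one.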
#[derive(Clone, Debug)]
pub struct TextChunk {
    /// An optional absolute position along the X-axis.
pub x: Option<f64>,
    /// An optional absolute position along the Y-axis.
pub y: Option<f64>,
    /// An optional relative position along the X-axis.
pub dx: Option<f64>,
    /// An optional relative position along the Y-axis.
pub dy: Option<f64>,
/// A text anchor/align.
pub anchor: TextAnchor,
/// A list of text spans.
pub spans: Vec<TextSpan>,
}
/// A text span.
///
/// `tspan` element in SVG.
#[derive(Clone, Debug)]
pub struct TextSpan {
/// Element visibility.
pub visibility: Visibility,
/// Fill style.
pub fill: Option<Fill>,
/// Stroke style.
pub stroke: Option<Stroke>,
/// Font description.
pub font: Font,
/// Baseline shift.
pub baseline_shift: f64,
/// Text decoration.
///
/// Unlike `text-decoration` attribute from the SVG, this one has all styles resolved.
/// Basically, by the SVG `text-decoration` attribute can be defined on `tspan` element
/// and on any parent element. And all definitions should be combined.
/// The one that was defined by `tspan` uses the `tspan` style itself.
/// The one that was defined by any parent node uses the `text` element style.
/// So it's pretty hard to resolve.
///
/// This property has all this stuff resolved.
pub decoration: TextDecoration,
/// An actual text line.
///
/// SVG doesn't support multiline text, so this property doesn't have a new line inside of it.
/// All the spaces are already trimmed or preserved, depending on the `xml:space` attribute.
    /// All character references are already resolved, so there is no `>` or `P`.
/// So this text should be rendered as is, without any postprocessing.
pub text: String,
}
/// A raster image element.
///
/// `image` element in SVG.
#[derive(Clone, Debug)]
pub struct Image {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Isn't automatically generated.
/// Can be empty.
pub id: String,
/// Element transform.
pub transform: Transform,
/// Element visibility.
pub visibility: Visibility,
/// An image rectangle in which it should be fit.
///
/// Combination of the `x`, `y`, `width`, `height` and `preserveAspectRatio`
/// attributes.
pub view_box: ViewBox,
/// Rendering mode.
///
/// `image-rendering` in SVG.
pub rendering_mode: ImageRendering,
/// Image data.
pub data: ImageData,
/// Image data kind.
pub format: ImageFormat,
}
/// A group container.
///
/// The preprocessor will remove all groups that don't impact rendering.
/// Those that are left are just an indicator that a new canvas should be created.
///
/// `g` element in SVG.
#[derive(Clone, Debug)]
pub struct Group {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Isn't automatically generated.
/// Can be empty.
pub id: String,
/// Element transform.
pub transform: Transform,
/// Group opacity.
///
/// After the group is rendered we should combine
/// it with a parent group using the specified opacity.
pub opacity: Opacity,
/// Element clip path.
pub clip_path: Option<String>,
/// Element mask.
pub mask: Option<String>,
/// Element filter.
pub filter: Option<String>,
}
impl Default for Group {
fn default() -> Self {
Group {
id: String::new(),
transform: Transform::default(),
opacity: Opacity::default(),
clip_path: None,
mask: None,
filter: None,
}
}
}<|fim▁hole|>
/// A generic gradient.
#[derive(Clone, Debug)]
pub struct BaseGradient {
/// Coordinate system units.
///
/// `gradientUnits` in SVG.
pub units: Units,
/// Gradient transform.
///
/// `gradientTransform` in SVG.
pub transform: Transform,
/// Gradient spreading method.
///
/// `spreadMethod` in SVG.
pub spread_method: SpreadMethod,
/// A list of `stop` elements.
pub stops: Vec<Stop>,
}
/// A linear gradient.
///
/// `linearGradient` element in SVG.
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct LinearGradient {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
pub x1: f64,
pub y1: f64,
pub x2: f64,
pub y2: f64,
/// Base gradient data.
pub base: BaseGradient,
}
impl Deref for LinearGradient {
type Target = BaseGradient;
fn deref(&self) -> &Self::Target {
&self.base
}
}
/// A radial gradient.
///
/// `radialGradient` element in SVG.
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub struct RadialGradient {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
pub cx: f64,
pub cy: f64,
pub r: PositiveNumber,
pub fx: f64,
pub fy: f64,
/// Base gradient data.
pub base: BaseGradient,
}
impl Deref for RadialGradient {
type Target = BaseGradient;
fn deref(&self) -> &Self::Target {
&self.base
}
}
/// Gradient's stop element.
///
/// `stop` element in SVG.
#[derive(Clone, Copy, Debug)]
pub struct Stop {
/// Gradient stop offset.
///
/// `offset` in SVG.
pub offset: StopOffset,
/// Gradient stop color.
///
/// `stop-color` in SVG.
pub color: Color,
/// Gradient stop opacity.
///
/// `stop-opacity` in SVG.
pub opacity: Opacity,
}
/// A clip-path element.
///
/// `clipPath` element in SVG.
#[derive(Clone, Debug)]
pub struct ClipPath {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
/// Coordinate system units.
///
/// `clipPathUnits` in SVG.
pub units: Units,
/// Clip path transform.
///
/// `transform` in SVG.
pub transform: Transform,
/// Additional clip path.
///
/// `clip-path` in SVG.
pub clip_path: Option<String>,
}
impl Default for ClipPath {
fn default() -> Self {
ClipPath {
id: String::new(),
units: Units::UserSpaceOnUse,
transform: Transform::default(),
clip_path: None,
}
}
}
/// A mask element.
///
/// `mask` element in SVG.
#[derive(Clone, Debug)]
pub struct Mask {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
/// Coordinate system units.
///
/// `maskUnits` in SVG.
pub units: Units,
/// Content coordinate system units.
///
/// `maskContentUnits` in SVG.
pub content_units: Units,
/// Mask rectangle.
///
/// `x`, `y`, `width` and `height` in SVG.
pub rect: Rect,
/// Additional mask.
///
/// `mask` in SVG.
pub mask: Option<String>,
}
/// A pattern element.
///
/// `pattern` element in SVG.
#[derive(Clone, Debug)]
pub struct Pattern {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
/// Coordinate system units.
///
/// `patternUnits` in SVG.
pub units: Units,
// TODO: should not be accessible when `viewBox` is present.
/// Content coordinate system units.
///
/// `patternContentUnits` in SVG.
pub content_units: Units,
/// Pattern transform.
///
/// `patternTransform` in SVG.
pub transform: Transform,
/// Pattern rectangle.
///
/// `x`, `y`, `width` and `height` in SVG.
pub rect: Rect,
/// Pattern viewbox.
pub view_box: Option<ViewBox>,
}
/// A filter element.
///
/// `filter` element in the SVG.
#[derive(Clone, Debug)]
pub struct Filter {
/// Element's ID.
///
/// Taken from the SVG itself.
/// Can't be empty.
pub id: String,
/// Region coordinate system units.
///
/// `filterUnits` in the SVG.
pub units: Units,
/// Content coordinate system units.
///
/// `primitiveUnits` in the SVG.
pub primitive_units: Units,
/// Filter region.
///
/// `x`, `y`, `width` and `height` in the SVG.
pub rect: Rect,
/// A list of filter primitives.
pub children: Vec<FilterPrimitive>,
}
/// A filter primitive element.
#[derive(Clone, Debug)]
pub struct FilterPrimitive {
/// `x` coordinate of the filter subregion.
pub x: Option<f64>,
/// `y` coordinate of the filter subregion.
pub y: Option<f64>,
/// The filter subregion width.
pub width: Option<f64>,
/// The filter subregion height.
pub height: Option<f64>,
/// Color interpolation mode.
///
/// `color-interpolation-filters` in the SVG.
pub color_interpolation: ColorInterpolation,
/// Assigned name for this filter primitive.
///
/// `result` in the SVG.
pub result: String,
/// Filter primitive kind.
pub kind: FilterKind,
}
/// A filter kind.
#[allow(missing_docs)]
#[derive(Clone, Debug)]
pub enum FilterKind {
FeBlend(FeBlend),
FeComposite(FeComposite),
FeFlood(FeFlood),
FeGaussianBlur(FeGaussianBlur),
FeImage(FeImage),
FeMerge(FeMerge),
FeOffset(FeOffset),
FeTile(FeTile),
}
/// A blend filter primitive.
///
/// `feBlend` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeBlend {
/// Identifies input for the given filter primitive.
///
/// `in` in the SVG.
pub input1: FilterInput,
/// Identifies input for the given filter primitive.
///
/// `in2` in the SVG.
pub input2: FilterInput,
/// A blending mode.
///
/// `mode` in the SVG.
pub mode: FeBlendMode,
}
/// A composite filter primitive.
///
/// `feComposite` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeComposite {
/// Identifies input for the given filter primitive.
///
/// `in` in the SVG.
pub input1: FilterInput,
/// Identifies input for the given filter primitive.
///
/// `in2` in the SVG.
pub input2: FilterInput,
/// A compositing operation.
///
/// `operator` in the SVG.
pub operator: FeCompositeOperator,
}
/// A flood filter primitive.
///
/// `feFlood` element in the SVG.
#[derive(Clone, Copy, Debug)]
pub struct FeFlood {
/// A flood color.
///
/// `flood-color` in the SVG.
pub color: Color,
/// A flood opacity.
///
/// `flood-opacity` in the SVG.
pub opacity: Opacity,
}
/// A Gaussian blur filter primitive.
///
/// `feGaussianBlur` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeGaussianBlur {
/// Identifies input for the given filter primitive.
///
/// `in` in the SVG.
pub input: FilterInput,
/// A standard deviation along the X-axis.
///
/// `stdDeviation` in the SVG.
pub std_dev_x: PositiveNumber,
/// A standard deviation along the Y-axis.
///
/// `stdDeviation` in the SVG.
pub std_dev_y: PositiveNumber,
}
/// An image filter primitive.
///
/// `feImage` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeImage {
/// Value of the `preserveAspectRatio` attribute.
pub aspect: AspectRatio,
/// Rendering method.
///
/// `image-rendering` in SVG.
pub rendering_mode: ImageRendering,
/// Image data.
pub data: FeImageKind,
}
/// A merge filter primitive.
///
/// `feMerge` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeMerge {
/// List of input layers that should be merged.
///
/// List of `feMergeNode`'s in the SVG.
pub inputs: Vec<FilterInput>,
}
/// An offset filter primitive.
///
/// `feOffset` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeOffset {
/// Identifies input for the given filter primitive.
///
/// `in` in the SVG.
pub input: FilterInput,
/// The amount to offset the input graphic along the X-axis.
pub dx: f64,
/// The amount to offset the input graphic along the Y-axis.
pub dy: f64,
}
/// A tile filter primitive.
///
/// `feTile` element in the SVG.
#[derive(Clone, Debug)]
pub struct FeTile {
/// Identifies input for the given filter primitive.
///
/// `in` in the SVG.
pub input: FilterInput,
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn node_kind_size() {
assert!(std::mem::size_of::<NodeKind>() <= 256);
}
}<|fim▁end|>
| |
<|file_name|>opts.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Configuration options for a single run of the servo application. Created
//! from command line arguments.
use euclid::size::{Size2D, TypedSize2D};
use geometry::ScreenPx;
use getopts::Options;
use num_cpus;
use prefs::{self, PrefValue};
use resource_files::set_resources_path;
use std::cmp;
use std::default::Default;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering};
use url::{self, Url};
/// Global flags for Servo, currently set on the command line.
#[derive(Clone, Deserialize, Serialize)]
pub struct Opts {
pub is_running_problem_test: bool,
/// The initial URL to load.
pub url: Option<Url>,
/// How many threads to use for CPU painting (`-t`).
///
/// Note that painting is sequentialized when using GPU painting.
pub paint_threads: usize,
/// True to use GPU painting via Skia-GL, false to use CPU painting via Skia (`-g`). Note that
/// compositing is always done on the GPU.
pub gpu_painting: bool,
/// The maximum size of each tile in pixels (`-s`).
pub tile_size: usize,
/// The ratio of device pixels per px at the default scale. If unspecified, will use the
/// platform default setting.
pub device_pixels_per_px: Option<f32>,
/// `None` to disable the time profiler or `Some` with an interval in seconds to enable it and
/// cause it to produce output on that interval (`-p`).
pub time_profiler_period: Option<f64>,
/// `None` to disable the memory profiler or `Some` with an interval in seconds to enable it
/// and cause it to produce output on that interval (`-m`).
pub mem_profiler_period: Option<f64>,
/// The number of threads to use for layout (`-y`). Defaults to 1, which results in a recursive
/// sequential algorithm.
pub layout_threads: usize,
pub nonincremental_layout: bool,
/// Where to load userscripts from, if any. An empty string will load from
/// the resources/user-agent-js directory, and if the option isn't passed userscripts
/// won't be loaded.
pub userscripts: Option<String>,
pub user_stylesheets: Vec<(Vec<u8>, Url)>,
pub output_file: Option<String>,
/// Replace unpaired surrogates in DOM strings with U+FFFD.
/// See https://github.com/servo/servo/issues/6564
pub replace_surrogates: bool,
/// Log GC passes and their durations.
pub gc_profile: bool,
/// Load web fonts synchronously to avoid non-deterministic network-driven reflows.
pub load_webfonts_synchronously: bool,
pub headless: bool,
pub hard_fail: bool,
/// True if we should bubble intrinsic widths sequentially (`-b`). If this is true, then
/// intrinsic widths are computed as a separate pass instead of during flow construction. You
/// may wish to turn this flag on in order to benchmark style recalculation against other
/// browser engines.
pub bubble_inline_sizes_separately: bool,
/// True if we should show borders on all layers and tiles for
/// debugging purposes (`--show-debug-borders`).
pub show_debug_borders: bool,
/// True if we should show borders on all fragments for debugging purposes
/// (`--show-debug-fragment-borders`).
pub show_debug_fragment_borders: bool,
/// True if we should paint tiles with overlays based on which thread painted them.
pub show_debug_parallel_paint: bool,
/// True if we should paint borders around flows based on which thread painted them.
pub show_debug_parallel_layout: bool,
/// True if we should paint tiles a random color whenever they're repainted. Useful for
/// debugging invalidation.
pub paint_flashing: bool,
/// If set with --disable-text-aa, disable antialiasing on fonts. This is primarily useful for reftests
/// where pixel perfect results are required when using fonts such as the Ahem
/// font for layout tests.
pub enable_text_antialiasing: bool,
/// If set with --disable-canvas-aa, disable antialiasing on the HTML canvas element.
/// Like --disable-text-aa, this is useful for reftests where pixel perfect results are required.
pub enable_canvas_antialiasing: bool,
/// True if each step of layout is traced to an external JSON file
/// for debugging purposes. Setting this implies sequential layout
/// and paint.
pub trace_layout: bool,
/// If true, instrument the runtime for each task created and dump
/// that information to a JSON file that can be viewed in the task
/// profile viewer.
pub profile_tasks: bool,
/// Periodically print out on which events script tasks spend their processing time.
pub profile_script_events: bool,
/// Enable all heartbeats for profiling.
pub profile_heartbeats: bool,
/// `None` to disable devtools or `Some` with a port number to start a server to listen to
/// remote Firefox devtools connections.
pub devtools_port: Option<u16>,
/// `None` to disable WebDriver or `Some` with a port number to start a server to listen to
/// remote WebDriver commands.
pub webdriver_port: Option<u16>,
/// The initial requested size of the window.
pub initial_window_size: TypedSize2D<ScreenPx, u32>,
/// An optional string allowing the user agent to be set for testing.
pub user_agent: String,<|fim▁hole|> pub multiprocess: bool,
/// Whether we're running inside the sandbox.
pub sandbox: bool,
/// Dumps the flow tree after a layout.
pub dump_flow_tree: bool,
/// Dumps the display list after a layout.
pub dump_display_list: bool,
/// Dumps the display list in JSON form after a layout.
pub dump_display_list_json: bool,
/// Dumps the display list after optimization (post layout, at painting time).
pub dump_display_list_optimized: bool,
/// Dumps the layer tree when it changes.
pub dump_layer_tree: bool,
/// Emits notifications when there is a relayout.
pub relayout_event: bool,
/// Whether to show an error when display list geometry escapes flow overflow regions.
pub validate_display_list_geometry: bool,
/// Whether Style Sharing Cache is used
pub disable_share_style_cache: bool,
/// Whether to run absolute position calculation and display list construction in parallel.
pub parallel_display_list_building: bool,
/// Translate mouse input into touch events.
pub convert_mouse_to_touch: bool,
/// True to exit after the page load (`-x`).
pub exit_after_load: bool,
/// Do not use native titlebar
pub no_native_titlebar: bool,
/// Enable vsync in the compositor
pub enable_vsync: bool,
}
fn print_usage(app: &str, opts: &Options) {
let message = format!("Usage: {} [ options ... ] [URL]\n\twhere options include", app);
println!("{}", opts.usage(&message));
}
/// Debug options for Servo, currently set on the command line with -Z
#[derive(Default)]
pub struct DebugOptions {
/// List all the debug options.
pub help: bool,
/// Bubble intrinsic widths separately like other engines.
pub bubble_widths: bool,
/// Disable antialiasing of rendered text.
pub disable_text_aa: bool,
/// Disable antialiasing of rendered text on the HTML canvas element.
pub disable_canvas_aa: bool,
/// Print the flow tree after each layout.
pub dump_flow_tree: bool,
/// Print the display list after each layout.
pub dump_display_list: bool,
/// Print the display list in JSON form.
pub dump_display_list_json: bool,
/// Print optimized display list (at paint time).
pub dump_display_list_optimized: bool,
/// Print the layer tree whenever it changes.
pub dump_layer_tree: bool,
/// Print notifications when there is a relayout.
pub relayout_event: bool,
/// Instrument each task, writing the output to a file.
pub profile_tasks: bool,
/// Profile which events script tasks spend their time on.
pub profile_script_events: bool,
/// Enable all heartbeats for profiling.
pub profile_heartbeats: bool,
/// Paint borders along layer and tile boundaries.
pub show_compositor_borders: bool,
/// Paint borders along fragment boundaries.
pub show_fragment_borders: bool,
/// Overlay tiles with colors showing which thread painted them.
pub show_parallel_paint: bool,
/// Mark which thread laid each flow out with colors.
pub show_parallel_layout: bool,
/// Overlay repainted areas with a random color.
pub paint_flashing: bool,
/// Write layout trace to an external file for debugging.
pub trace_layout: bool,
/// Display an error when display list geometry escapes overflow region.
pub validate_display_list_geometry: bool,
/// Disable the style sharing cache.
pub disable_share_style_cache: bool,
/// Build display lists in parallel.
pub parallel_display_list_building: bool,
/// Translate mouse input into touch events.
pub convert_mouse_to_touch: bool,
/// Replace unpaired surrogates in DOM strings with U+FFFD.
/// See https://github.com/servo/servo/issues/6564
pub replace_surrogates: bool,
/// Log GC passes and their durations.
pub gc_profile: bool,
/// Load web fonts synchronously to avoid non-deterministic network-driven reflows.
pub load_webfonts_synchronously: bool,
/// Disable vsync in the compositor
pub disable_vsync: bool,
}
impl DebugOptions {
pub fn new(debug_string: &str) -> Result<DebugOptions, &str> {
let mut debug_options = DebugOptions::default();
for option in debug_string.split(',') {
match option {
"help" => debug_options.help = true,
"bubble-widths" => debug_options.bubble_widths = true,
"disable-text-aa" => debug_options.disable_text_aa = true,
"disable-canvas-aa" => debug_options.disable_text_aa = true,
"dump-flow-tree" => debug_options.dump_flow_tree = true,
"dump-display-list" => debug_options.dump_display_list = true,
"dump-display-list-json" => debug_options.dump_display_list_json = true,
"dump-display-list-optimized" => debug_options.dump_display_list_optimized = true,
"dump-layer-tree" => debug_options.dump_layer_tree = true,
"relayout-event" => debug_options.relayout_event = true,
"profile-tasks" => debug_options.profile_tasks = true,
"profile-script-events" => debug_options.profile_script_events = true,
"profile-heartbeats" => debug_options.profile_heartbeats = true,
"show-compositor-borders" => debug_options.show_compositor_borders = true,
"show-fragment-borders" => debug_options.show_fragment_borders = true,
"show-parallel-paint" => debug_options.show_parallel_paint = true,
"show-parallel-layout" => debug_options.show_parallel_layout = true,
"paint-flashing" => debug_options.paint_flashing = true,
"trace-layout" => debug_options.trace_layout = true,
"validate-display-list-geometry" => debug_options.validate_display_list_geometry = true,
"disable-share-style-cache" => debug_options.disable_share_style_cache = true,
"parallel-display-list-building" => debug_options.parallel_display_list_building = true,
"convert-mouse-to-touch" => debug_options.convert_mouse_to_touch = true,
"replace-surrogates" => debug_options.replace_surrogates = true,
"gc-profile" => debug_options.gc_profile = true,
"load-webfonts-synchronously" => debug_options.load_webfonts_synchronously = true,
"disable-vsync" => debug_options.disable_vsync = true,
"" => {},
_ => return Err(option)
};
};
Ok(debug_options)
}
}
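// Illustrative sketch, not part of the original source: how the `-Z` string is
// turned into a `DebugOptions`, using option names taken from the match above.
// The function name is made up and the code is dead by design.
#[allow(dead_code)]
fn debug_options_parsing_sketch() {
    let options = DebugOptions::new("dump-flow-tree,paint-flashing").unwrap();
    assert!(options.dump_flow_tree && options.paint_flashing);
    // Unrecognized options come back as `Err` carrying the offending token.
    assert!(DebugOptions::new("no-such-option").is_err());
}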
pub fn print_debug_usage(app: &str) -> ! {
fn print_option(name: &str, description: &str) {
println!("\t{:<35} {}", name, description);
}
println!("Usage: {} debug option,[options,...]\n\twhere options include\n\nOptions:", app);
print_option("bubble-widths", "Bubble intrinsic widths separately like other engines.");
print_option("disable-text-aa", "Disable antialiasing of rendered text.");
print_option("disable-canvas-aa", "Disable antialiasing on the HTML canvas element.");
print_option("dump-flow-tree", "Print the flow tree after each layout.");
print_option("dump-display-list", "Print the display list after each layout.");
print_option("dump-display-list-json", "Print the display list in JSON form.");
print_option("dump-display-list-optimized", "Print optimized display list (at paint time).");
print_option("dump-layer-tree", "Print the layer tree whenever it changes.");
print_option("relayout-event", "Print notifications when there is a relayout.");
print_option("profile-tasks", "Instrument each task, writing the output to a file.");
print_option("profile-script-events", "Enable profiling of script-related events.");
print_option("profile-heartbeats", "Enable heartbeats for all task categories.");
print_option("show-compositor-borders", "Paint borders along layer and tile boundaries.");
print_option("show-fragment-borders", "Paint borders along fragment boundaries.");
print_option("show-parallel-paint", "Overlay tiles with colors showing which thread painted them.");
print_option("show-parallel-layout", "Mark which thread laid each flow out with colors.");
print_option("paint-flashing", "Overlay repainted areas with a random color.");
print_option("trace-layout", "Write layout trace to an external file for debugging.");
print_option("validate-display-list-geometry",
"Display an error when display list geometry escapes overflow region.");
print_option("disable-share-style-cache",
"Disable the style sharing cache.");
print_option("parallel-display-list-building", "Build display lists in parallel.");
print_option("convert-mouse-to-touch", "Send touch events instead of mouse events");
print_option("replace-surrogates", "Replace unpaires surrogates in DOM strings with U+FFFD. \
See https://github.com/servo/servo/issues/6564");
print_option("gc-profile", "Log GC passes and their durations.");
print_option("load-webfonts-synchronously",
"Load web fonts synchronously to avoid non-deterministic network-driven reflows");
print_option("disable-vsync",
"Disable vsync mode in the compositor to allow profiling at more than monitor refresh rate");
println!("");
process::exit(0)
}
fn args_fail(msg: &str) -> ! {
let mut stderr = io::stderr();
stderr.write_all(msg.as_bytes()).unwrap();
stderr.write_all(b"\n").unwrap();
process::exit(1)
}
// Always use CPU painting on android.
#[cfg(target_os = "android")]
static FORCE_CPU_PAINTING: bool = true;
#[cfg(not(target_os = "android"))]
static FORCE_CPU_PAINTING: bool = false;
static MULTIPROCESS: AtomicBool = ATOMIC_BOOL_INIT;
#[inline]
pub fn multiprocess() -> bool {
MULTIPROCESS.load(Ordering::Relaxed)
}
enum UserAgent {
Desktop,
Android,
Gonk,
}
fn default_user_agent_string(agent: UserAgent) -> String {
#[cfg(all(target_os = "linux", target_arch = "x86_64"))]
const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (X11; Linux x86_64; rv:37.0) Servo/1.0 Firefox/37.0";
#[cfg(all(target_os = "linux", not(target_arch = "x86_64")))]
const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (X11; Linux i686; rv:37.0) Servo/1.0 Firefox/37.0";
#[cfg(all(target_os = "windows", target_arch = "x86_64"))]
const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:37.0) Servo/1.0 Firefox/37.0";
#[cfg(all(target_os = "windows", not(target_arch = "x86_64")))]
const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (Windows NT 6.1; rv:37.0) Servo/1.0 Firefox/37.0";
#[cfg(not(any(target_os = "linux", target_os = "windows")))]
// Neither Linux nor Windows, so maybe OS X, and if not then OS X is an okay fallback.
const DESKTOP_UA_STRING: &'static str =
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Servo/1.0 Firefox/37.0";
match agent {
UserAgent::Desktop => {
DESKTOP_UA_STRING
}
UserAgent::Android => {
"Mozilla/5.0 (Android; Mobile; rv:37.0) Servo/1.0 Firefox/37.0"
}
UserAgent::Gonk => {
"Mozilla/5.0 (Mobile; rv:37.0) Servo/1.0 Firefox/37.0"
}
}.to_owned()
}
#[cfg(target_os = "android")]
const DEFAULT_USER_AGENT: UserAgent = UserAgent::Android;
// FIXME: This requires https://github.com/servo/servo/issues/7138 to provide the
// correct string in Gonk builds (i.e., it will never be chosen today).
#[cfg(target_os = "gonk")]
const DEFAULT_USER_AGENT: UserAgent = UserAgent::Gonk;
#[cfg(not(any(target_os = "android", target_os = "gonk")))]
const DEFAULT_USER_AGENT: UserAgent = UserAgent::Desktop;
pub fn default_opts() -> Opts {
Opts {
is_running_problem_test: false,
url: Some(url!("about:blank")),
paint_threads: 1,
gpu_painting: false,
tile_size: 512,
device_pixels_per_px: None,
time_profiler_period: None,
mem_profiler_period: None,
layout_threads: 1,
nonincremental_layout: false,
userscripts: None,
user_stylesheets: Vec::new(),
output_file: None,
replace_surrogates: false,
gc_profile: false,
load_webfonts_synchronously: false,
headless: true,
hard_fail: true,
bubble_inline_sizes_separately: false,
show_debug_borders: false,
show_debug_fragment_borders: false,
show_debug_parallel_paint: false,
show_debug_parallel_layout: false,
paint_flashing: false,
enable_text_antialiasing: false,
enable_canvas_antialiasing: false,
trace_layout: false,
devtools_port: None,
webdriver_port: None,
initial_window_size: Size2D::typed(800, 600),
user_agent: default_user_agent_string(DEFAULT_USER_AGENT),
multiprocess: false,
sandbox: false,
dump_flow_tree: false,
dump_display_list: false,
dump_display_list_json: false,
dump_display_list_optimized: false,
dump_layer_tree: false,
relayout_event: false,
validate_display_list_geometry: false,
profile_tasks: false,
profile_script_events: false,
profile_heartbeats: false,
disable_share_style_cache: false,
parallel_display_list_building: false,
convert_mouse_to_touch: false,
exit_after_load: false,
no_native_titlebar: false,
enable_vsync: true,
}
}
pub fn from_cmdline_args(args: &[String]) -> ArgumentParsingResult {
let (app_name, args) = args.split_first().unwrap();
let mut opts = Options::new();
opts.optflag("c", "cpu", "CPU painting (default)");
opts.optflag("g", "gpu", "GPU painting");
opts.optopt("o", "output", "Output file", "output.png");
opts.optopt("s", "size", "Size of tiles", "512");
opts.optopt("", "device-pixel-ratio", "Device pixels per px", "");
opts.optopt("t", "threads", "Number of paint threads", "1");
opts.optflagopt("p", "profile", "Profiler flag and output interval", "10");
opts.optflagopt("m", "memory-profile", "Memory profiler flag and output interval", "10");
opts.optflag("x", "exit", "Exit after load flag");
opts.optopt("y", "layout-threads", "Number of threads to use for layout", "1");
opts.optflag("i", "nonincremental-layout", "Enable to turn off incremental layout.");
opts.optflag("", "no-ssl", "Disables ssl certificate verification.");
opts.optflagopt("", "userscripts",
"Uses userscripts in resources/user-agent-js, or a specified full path", "");
opts.optmulti("", "user-stylesheet",
"A user stylesheet to be added to every document", "file.css");
opts.optflag("z", "headless", "Headless mode");
opts.optflag("f", "hard-fail", "Exit on task failure instead of displaying about:failure");
opts.optflagopt("", "devtools", "Start remote devtools server on port", "6000");
opts.optflagopt("", "webdriver", "Start remote WebDriver server on port", "7000");
opts.optopt("", "resolution", "Set window resolution.", "800x600");
opts.optopt("u",
"user-agent",
"Set custom user agent string (or android / gonk / desktop for platform default)",
"NCSA Mosaic/1.0 (X11;SunOS 4.1.4 sun4m)");
opts.optflag("M", "multiprocess", "Run in multiprocess mode");
opts.optflag("S", "sandbox", "Run in a sandbox if multiprocess");
opts.optopt("Z", "debug",
"A comma-separated string of debug options. Pass help to show available options.", "");
opts.optflag("h", "help", "Print this message");
opts.optopt("", "resources-path", "Path to find static resources", "/home/servo/resources");
opts.optopt("", "content-process" , "Run as a content process and connect to the given pipe",
"servo-ipc-channel.abcdefg");
opts.optmulti("", "pref",
"A preference to set to enable", "dom.mozbrowser.enabled");
opts.optflag("b", "no-native-titlebar", "Do not use native titlebar");
let opt_match = match opts.parse(args) {
Ok(m) => m,
Err(f) => args_fail(&f.to_string()),
};
set_resources_path(opt_match.opt_str("resources-path"));
if opt_match.opt_present("h") || opt_match.opt_present("help") {
print_usage(app_name, &opts);
process::exit(0);
};
// If this is the content process, we'll receive the real options over IPC. So just fill in
// some dummy options for now.
if let Some(content_process) = opt_match.opt_str("content-process") {
MULTIPROCESS.store(true, Ordering::SeqCst);
return ArgumentParsingResult::ContentProcess(content_process);
}
let debug_string = match opt_match.opt_str("Z") {
Some(string) => string,
None => String::new()
};
let debug_options = match DebugOptions::new(&debug_string) {
Ok(debug_options) => debug_options,
Err(e) => args_fail(&format!("error: unrecognized debug option: {}", e)),
};
if debug_options.help {
print_debug_usage(app_name)
}
let cwd = env::current_dir().unwrap();
let homepage_pref = prefs::get_pref("shell.homepage");
let url_opt = if !opt_match.free.is_empty() {
Some(&opt_match.free[0][..])
} else {
homepage_pref.as_string()
};
let is_running_problem_test =
url_opt
.as_ref()
.map_or(false, |url|
url.starts_with("http://web-platform.test:8000/2dcontext/drawing-images-to-the-canvas/") ||
url.starts_with("http://web-platform.test:8000/_mozilla/mozilla/canvas/") ||
url.starts_with("http://web-platform.test:8000/_mozilla/css/canvas_over_area.html"));
let url = match url_opt {
Some(url_string) => {
parse_url_or_filename(&cwd, url_string)
.unwrap_or_else(|()| args_fail("URL parsing failed"))
},
None => {
print_usage(app_name, &opts);
args_fail("servo asks that you provide a URL")
}
};
let tile_size: usize = match opt_match.opt_str("s") {
Some(tile_size_str) => tile_size_str.parse().unwrap(),
None => 512,
};
let device_pixels_per_px = opt_match.opt_str("device-pixel-ratio").map(|dppx_str|
dppx_str.parse().unwrap()
);
let mut paint_threads: usize = match opt_match.opt_str("t") {
Some(paint_threads_str) => paint_threads_str.parse().unwrap(),
None => cmp::max(num_cpus::get() * 3 / 4, 1),
};
// If only the flag is present, default to a 5 second period for both profilers.
let time_profiler_period = opt_match.opt_default("p", "5").map(|period| {
period.parse().unwrap()
});
let mem_profiler_period = opt_match.opt_default("m", "5").map(|period| {
period.parse().unwrap()
});
let gpu_painting = !FORCE_CPU_PAINTING && opt_match.opt_present("g");
let mut layout_threads: usize = match opt_match.opt_str("y") {
Some(layout_threads_str) => layout_threads_str.parse().unwrap(),
None => cmp::max(num_cpus::get() * 3 / 4, 1),
};
let nonincremental_layout = opt_match.opt_present("i");
let mut bubble_inline_sizes_separately = debug_options.bubble_widths;
if debug_options.trace_layout {
paint_threads = 1;
layout_threads = 1;
bubble_inline_sizes_separately = true;
}
let devtools_port = opt_match.opt_default("devtools", "6000").map(|port| {
port.parse().unwrap()
});
let webdriver_port = opt_match.opt_default("webdriver", "7000").map(|port| {
port.parse().unwrap()
});
let initial_window_size = match opt_match.opt_str("resolution") {
Some(res_string) => {
let res: Vec<u32> = res_string.split('x').map(|r| r.parse().unwrap()).collect();
Size2D::typed(res[0], res[1])
}
None => {
Size2D::typed(800, 600)
}
};
if opt_match.opt_present("M") {
MULTIPROCESS.store(true, Ordering::SeqCst)
}
let user_agent = match opt_match.opt_str("u") {
Some(ref ua) if ua == "android" => default_user_agent_string(UserAgent::Android),
Some(ref ua) if ua == "gonk" => default_user_agent_string(UserAgent::Gonk),
Some(ref ua) if ua == "desktop" => default_user_agent_string(UserAgent::Desktop),
Some(ua) => ua,
None => default_user_agent_string(DEFAULT_USER_AGENT),
};
let user_stylesheets = opt_match.opt_strs("user-stylesheet").iter().map(|filename| {
let path = cwd.join(filename);
let url = Url::from_file_path(&path).unwrap();
let mut contents = Vec::new();
File::open(path)
.unwrap_or_else(|err| args_fail(&format!("Couldn’t open {}: {}", filename, err)))
.read_to_end(&mut contents)
.unwrap_or_else(|err| args_fail(&format!("Couldn’t read {}: {}", filename, err)));
(contents, url)
}).collect();
let opts = Opts {
is_running_problem_test: is_running_problem_test,
url: Some(url),
paint_threads: paint_threads,
gpu_painting: gpu_painting,
tile_size: tile_size,
device_pixels_per_px: device_pixels_per_px,
time_profiler_period: time_profiler_period,
mem_profiler_period: mem_profiler_period,
layout_threads: layout_threads,
nonincremental_layout: nonincremental_layout,
userscripts: opt_match.opt_default("userscripts", ""),
user_stylesheets: user_stylesheets,
output_file: opt_match.opt_str("o"),
replace_surrogates: debug_options.replace_surrogates,
gc_profile: debug_options.gc_profile,
load_webfonts_synchronously: debug_options.load_webfonts_synchronously,
headless: opt_match.opt_present("z"),
hard_fail: opt_match.opt_present("f"),
bubble_inline_sizes_separately: bubble_inline_sizes_separately,
profile_tasks: debug_options.profile_tasks,
profile_script_events: debug_options.profile_script_events,
profile_heartbeats: debug_options.profile_heartbeats,
trace_layout: debug_options.trace_layout,
devtools_port: devtools_port,
webdriver_port: webdriver_port,
initial_window_size: initial_window_size,
user_agent: user_agent,
multiprocess: opt_match.opt_present("M"),
sandbox: opt_match.opt_present("S"),
show_debug_borders: debug_options.show_compositor_borders,
show_debug_fragment_borders: debug_options.show_fragment_borders,
show_debug_parallel_paint: debug_options.show_parallel_paint,
show_debug_parallel_layout: debug_options.show_parallel_layout,
paint_flashing: debug_options.paint_flashing,
enable_text_antialiasing: !debug_options.disable_text_aa,
enable_canvas_antialiasing: !debug_options.disable_canvas_aa,
dump_flow_tree: debug_options.dump_flow_tree,
dump_display_list: debug_options.dump_display_list,
dump_display_list_json: debug_options.dump_display_list_json,
dump_display_list_optimized: debug_options.dump_display_list_optimized,
dump_layer_tree: debug_options.dump_layer_tree,
relayout_event: debug_options.relayout_event,
validate_display_list_geometry: debug_options.validate_display_list_geometry,
disable_share_style_cache: debug_options.disable_share_style_cache,
parallel_display_list_building: debug_options.parallel_display_list_building,
convert_mouse_to_touch: debug_options.convert_mouse_to_touch,
exit_after_load: opt_match.opt_present("x"),
no_native_titlebar: opt_match.opt_present("b"),
enable_vsync: !debug_options.disable_vsync,
};
set_defaults(opts);
// This must happen after setting the default options, since the prefs rely on
// the resource path.
for pref in opt_match.opt_strs("pref").iter() {
prefs::set_pref(pref, PrefValue::Boolean(true));
}
ArgumentParsingResult::ChromeProcess
}
pub enum ArgumentParsingResult {
ChromeProcess,
ContentProcess(String),
}
static EXPERIMENTAL_ENABLED: AtomicBool = ATOMIC_BOOL_INIT;
/// Turn on experimental features globally. Normally this is done
/// during initialization by `set` or `from_cmdline_args`, but
/// tests that require experimental features will also set it.
pub fn set_experimental_enabled(new_value: bool) {
EXPERIMENTAL_ENABLED.store(new_value, Ordering::SeqCst);
}
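// Illustrative sketch, not part of the original source: a test that needs
// experimental features could flip the flag directly, as the doc comment above
// suggests. The function name is made up and the code is dead by design.
#[allow(dead_code)]
fn enable_experimental_for_tests_sketch() {
    set_experimental_enabled(true);
    assert!(EXPERIMENTAL_ENABLED.load(Ordering::SeqCst));
}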
// Make Opts available globally. This saves having to clone and pass
// opts everywhere it is used, which gets particularly cumbersome
// when passing through the DOM structures.
static mut DEFAULT_OPTIONS: *mut Opts = 0 as *mut Opts;
const INVALID_OPTIONS: *mut Opts = 0x01 as *mut Opts;
lazy_static! {
static ref OPTIONS: Opts = {
unsafe {
let initial = if !DEFAULT_OPTIONS.is_null() {
let opts = Box::from_raw(DEFAULT_OPTIONS);
*opts
} else {
default_opts()
};
DEFAULT_OPTIONS = INVALID_OPTIONS;
initial
}
};
}
pub fn set_defaults(opts: Opts) {
unsafe {
assert!(DEFAULT_OPTIONS.is_null());
assert!(DEFAULT_OPTIONS != INVALID_OPTIONS);
let box_opts = box opts;
DEFAULT_OPTIONS = Box::into_raw(box_opts);
}
}
#[inline]
pub fn get() -> &'static Opts {
&OPTIONS
}
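// Illustrative sketch, not part of the original source: the intended call
// pattern for the globals above is `set_defaults` exactly once during startup
// (it asserts if called a second time), then `get()` from anywhere afterwards.
// The function name is made up and the code is dead by design.
#[allow(dead_code)]
fn global_opts_usage_sketch() {
    set_defaults(default_opts());
    let opts = get();
    let _ = opts.layout_threads;
}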
pub fn parse_url_or_filename(cwd: &Path, input: &str) -> Result<Url, ()> {
match Url::parse(input) {
Ok(url) => Ok(url),
Err(url::ParseError::RelativeUrlWithoutBase) => {
Ok(Url::from_file_path(&*cwd.join(input)).unwrap())
}
Err(_) => Err(()),
}
}<|fim▁end|>
|
/// Whether we're running in multiprocess mode.
|
<|file_name|>highlight_problems.py<|end_file_name|><|fim▁begin|>import sublime
import sublime_plugin
import json
from threading import Thread
from ..lib.ycmd_handler import server
from ..lib.utils import *
from ..lib.msgs import MsgTemplates
class CppYCMHighlightProblemsListener(sublime_plugin.EventListener):
def on_selection_modified_async(self, view):
if not is_cpp(view) or view.is_scratch():
return
# Not work in st3, output panel wouldn't call this callback<|fim▁hole|> # sublime.message_dialog('match!')
# update_statusbar(view)
def on_post_save_async(self, view):
if not is_cpp(view) or view.is_scratch():
return
# run highlight problems command
if check_highlight_on_save():
view.window().run_command('cppycm_highlight_problems')<|fim▁end|>
|
# from ..commands.highlight_problems import output_panel
# if output_panel and (view.id() == output_panel.id()):
|
<|file_name|>fix_method_call_with_string_literal.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
fn f(s: String) -> String {
f(f(f("((")<caret>
}
|
<|file_name|>CreateAliasResultJsonUnmarshaller.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.gamelift.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.gamelift.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* CreateAliasResult JSON Unmarshaller
*/
public class CreateAliasResultJsonUnmarshaller implements
Unmarshaller<CreateAliasResult, JsonUnmarshallerContext> {
public CreateAliasResult unmarshall(JsonUnmarshallerContext context)
throws Exception {
CreateAliasResult createAliasResult = new CreateAliasResult();
int originalDepth = context.getCurrentDepth();
String currentParentElement = context.getCurrentParentElement();
int targetDepth = originalDepth + 1;
JsonToken token = context.getCurrentToken();
if (token == null)
token = context.nextToken();
if (token == VALUE_NULL)
return null;
while (true) {
if (token == null)
break;
if (token == FIELD_NAME || token == START_OBJECT) {<|fim▁hole|> context.nextToken();
createAliasResult.setAlias(AliasJsonUnmarshaller
.getInstance().unmarshall(context));
}
} else if (token == END_ARRAY || token == END_OBJECT) {
if (context.getLastParsedParentElement() == null
|| context.getLastParsedParentElement().equals(
currentParentElement)) {
if (context.getCurrentDepth() <= originalDepth)
break;
}
}
token = context.nextToken();
}
return createAliasResult;
}
private static CreateAliasResultJsonUnmarshaller instance;
public static CreateAliasResultJsonUnmarshaller getInstance() {
if (instance == null)
instance = new CreateAliasResultJsonUnmarshaller();
return instance;
}
}<|fim▁end|>
|
if (context.testExpression("Alias", targetDepth)) {
|
<|file_name|>issue-8153.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that duplicate methods in impls are not allowed
struct Foo;
trait Bar {
fn bar(&self) -> isize;
}
impl Bar for Foo {
fn bar(&self) -> isize {1}
fn bar(&self) -> isize {2} //~ ERROR duplicate definitions
}
fn main() {
println!("{}", Foo.bar());
}<|fim▁end|>
|
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
<|file_name|>resultsource0.rs<|end_file_name|><|fim▁begin|>fn double_number(number_str: &str) -> i32 {
// Essayons d'utiliser la méthode `unwrap` pour récupérer le nombre.
// Va-t-elle nous mordre ?
2 * number_str.parse::<i32>().unwrap()
}<|fim▁hole|>
fn main() {
let twenty = double_number("10");
println!("double is {}", twenty);
let tt = double_number("t");
println!("double is {}", tt);
}<|fim▁end|>
| |
<|file_name|>File_Wm_Elements.cpp<|end_file_name|><|fim▁begin|>/* Copyright (c) MediaArea.net SARL. All Rights Reserved.
*
* Use of this source code is governed by a BSD-style license that can
* be found in the License.html file in the root of the source tree.
*/
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//
// Elements part
//
//+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
//---------------------------------------------------------------------------
// Pre-compilation
#include "MediaInfo/PreComp.h"
#ifdef __BORLANDC__
#pragma hdrstop
#endif
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#include "MediaInfo/Setup.h"
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#ifdef MEDIAINFO_WM_YES
//---------------------------------------------------------------------------
//---------------------------------------------------------------------------
#include "MediaInfo/Multiple/File_Wm.h"
#if defined(MEDIAINFO_VC1_YES)
#include "MediaInfo/Video/File_Vc1.h"
#endif
#if defined(MEDIAINFO_MPEGV_YES)
#include "MediaInfo/Video/File_Mpegv.h"
#endif
#if defined(MEDIAINFO_AC3_YES)
#include "MediaInfo/Audio/File_Ac3.h"
#endif
#if defined(MEDIAINFO_MPEGA_YES)
#include "MediaInfo/Audio/File_Mpega.h"
#endif
#include "MediaInfo/MediaInfo_Config_MediaInfo.h"
#if MEDIAINFO_DEMUX
#include "ThirdParty/base64/base64.h"
#endif //MEDIAINFO_DEMUX
#include "ZenLib/Utils.h"
using namespace ZenLib;
//---------------------------------------------------------------------------
namespace MediaInfoLib
{
//***************************************************************************
// Constants
//***************************************************************************
static const char* Wm_CodecList_Kind(int32u Kind)
{
switch (Kind)
{
case 0x01 : return "Video";
case 0x02 : return "Audio";
default : return "";
}
}
static const char* Wm_BannerImageData_Type(int32u Type)
{
switch (Type)
{
case 0x00 : return "";
case 0x01 : return "Bitmap";
case 0x02 : return "JPEG";
case 0x03 : return "GIF";
default : return "";
}
}
#define UUID(NAME, PART1, PART2, PART3, PART4, PART5) \
const int64u NAME =((int64u(0x##PART1))&0xFF)<<56 | ((int64u(0x##PART1)>>8)&0xFF)<<48 | ((int64u(0x##PART1)>>16)&0xFF)<<40 | ((int64u(0x##PART1)>>24)&0xFF)<<32 | ((int64u(0x##PART2))&0xFF)<<24 | ((int64u(0x##PART2)>>8)&0xFF)<<16 | ((int64u(0x##PART3))&0xFF)<<8 | ((int64u(0x##PART3)>>8)&0xFF); \
const int64u NAME##2=0x##PART4##PART5##ULL; \
namespace Elements
{
UUID(Header, 75B22630, 668E, 11CF, A6D9, 00AA0062CE6C)
UUID(Header_FileProperties, 8CABDCA1, A947, 11CF, 8EE4, 00C00C205365)
UUID(Header_StreamProperties, B7DC0791, A9B7, 11CF, 8EE6, 00C00C205365)
UUID(Header_StreamProperties_Audio, F8699E40, 5B4D, 11CF, A8FD, 00805F5C442B)
UUID(Header_StreamProperties_Video, BC19EFC0, 5B4D, 11CF, A8FD, 00805F5C442B)
UUID(Header_StreamProperties_Command, 59DACFC0, 59E6, 11D0, A3AC, 00A0C90348F6)
UUID(Header_StreamProperties_JFIF, B61BE100, 5B4E, 11CF, A8FD, 00805F5C442B)
UUID(Header_StreamProperties_DegradableJPEG, 35907DE0, E415, 11CF, A917, 00805F5C442B)
UUID(Header_StreamProperties_FileTransfer, 91BD222C, F21C, 497A, 8B6D, 5AA86BFC0185)
UUID(Header_StreamProperties_Binary, 3AFB65E2, 47EF, 40F2, AC2C, 70A90D71D343)
UUID(Header_StreamProperties_Binary_WebStreamMediaSubType, 776257D4, C627, 41CB, 8F81, 7AC7FF1C40CC)
UUID(Header_StreamProperties_Binary_WebStreamFormat, DA1E6B13, 8359, 4050, B398, 388E965BF00C)
UUID(Header_HeaderExtension, 5FBF03B5, A92E, 11CF, 8EE3, 00C00C205365)
UUID(Header_HeaderExtension_ExtendedStreamProperties, 14E6A5CB, C672, 4332, 8399, A96952065B5A)
UUID(Header_HeaderExtension_AdvancedMutualExclusion, A08649CF, 4775, 4670, 8A16, 6E35357566CD)
UUID(Header_HeaderExtension_GroupMutualExclusion, D1465A40, 5A79, 4338, B71B, E36B8FD6C249)
UUID(Header_HeaderExtension_StreamPrioritization, D4FED15B, 88D3, 454F, 81F0, ED5C45999E24)
UUID(Header_HeaderExtension_BandwidthSharing, A69609E6, 517B, 11D2, B6AF, 00C04FD908E9)
UUID(Header_HeaderExtension_LanguageList, 7C4346A9, EFE0, 4BFC, B229, 393EDE415C85)
UUID(Header_HeaderExtension_Metadata, C5F8CBEA, 5BAF, 4877, 8467, AA8C44FA4CCA)
UUID(Header_HeaderExtension_MetadataLibrary, 44231C94, 9498, 49D1, A141, 1D134E457054)
UUID(Header_HeaderExtension_IndexParameters, D6E229DF, 35DA, 11D1, 9034, 00A0C90349BE)
UUID(Header_HeaderExtension_MediaIndexParameters, 6B203BAD, 3F11, 48E4, ACA8, D7613DE2CFA7)
UUID(Header_HeaderExtension_TimecodeIndexParameters, F55E496D, 9797, 4B5D, 8C8B, 604DFE9BFB24)
UUID(Header_HeaderExtension_Compatibility, 26F18B5D, 4584, 47EC, 9F5F, 0E651F0452C9)
UUID(Header_HeaderExtension_AdvancedContentEncryption, 43058533, 6981, 49E6, 9B74, AD12CB86D58C)
UUID(Header_HeaderExtension_IndexPlaceholder, D9AADE20, 7C17, 4F9C, BC28, 8555DD98E2A2)
UUID(Header_CodecList, 86D15240, 311D, 11D0, A3A4, 00ACC90348F6)
UUID(Header_ScriptCommand, 1EFB1A30, 0B62, 11D0, A39B, 00A0C90348F6)
UUID(Header_Marker, F487CD01, A951, 11CF, 8EE6, 00C00C205365)
UUID(Header_BitRateMutualExclusion, D6E229DC, 35DA, 11D1, 9034, 00A0C90349BE)
UUID(Header_ErrorCorrection, 75B22635, 668E, 11CF, A6D9, 00AA0062CE6C)
UUID(Header_ContentDescription, 75B22633, 668E, 11CF, A6D9, 00AA0062CE6C)
UUID(Header_ExtendedContentDescription, D2D0A440, E307, 11D2, 97F0, 00A0C95EA850)
UUID(Header_StreamBitRate, 7BF875CE, 468D, 11D1, 8D82, 006097C9A2B2)
UUID(Header_ContentBranding, 2211B3FA, BD23, 11D2, B4B7, 00A0C955FC6E)
UUID(Header_ContentEncryption, 2211B3FB, BD23, 11D2, B4B7, 00A0C955FC6E)
UUID(Header_ExtendedContentEncryption, 298AE614, 2622, 4C17, B935, DAE07EE9289C)
UUID(Header_DigitalSignature, 2211B3FC, BD23, 11D2, B4B7, 00A0C955FC6E)
UUID(Header_Padding, 1806D474, CADF, 4509, A4BA, 9AABCB96AAE8)
UUID(Data, 75B22636, 668E, 11CF, A6D9, 00AA0062CE6C)
UUID(SimpleIndex, 33000890, E5B1, 11CF, 89F4, 00A0C90349CB)
UUID(Index, D6E229D3, 35DA, 11D1, 9034, 00A0C90349BE)
UUID(MediaIndex, FEB103F8, 12AD, 4C64, 840F, 2A1D2F7AD48C)
UUID(TimecodeIndex, 3CB73FD0, 0C4A, 4803, 953D, EDF7B6228F0C)
UUID(Payload_Extension_System_TimeStamp, 1135BEB7, 3A39, 478A, 98D9, 15C76B00EB69);
UUID(Mutex_Language, D6E22A00, 35DA, 11D1, 9034, 00A0C90349BE);
UUID(Mutex_Bitrate, D6E22A01, 35DA, 11D1, 9034, 00A0C90349BE);
}
static const char* Wm_StreamType(const int128u& Kind)
{
switch (Kind.hi)
{
case Elements::Header_StreamProperties_Audio : return "Audio";
case Elements::Header_StreamProperties_Video : return "Video";
case Elements::Header_StreamProperties_Command : return "Command";
case Elements::Header_StreamProperties_JFIF : return "JFIF";
case Elements::Header_StreamProperties_DegradableJPEG : return "Degradable JPEG";
case Elements::Header_StreamProperties_FileTransfer : return "File Transfer";
case Elements::Header_StreamProperties_Binary : return "Binary";
default : return "";
}
}
static const char* Wm_ExclusionType(const int128u& ExclusionType)
{
switch (ExclusionType.hi)
{
case Elements::Header_StreamProperties_Audio : return "Language";
case Elements::Header_StreamProperties_Video : return "Bitrate";
default : return "";
}
}
//***************************************************************************
// Format
//***************************************************************************
//---------------------------------------------------------------------------
// Element parse
//
void File_Wm::Data_Parse()
{
//Parsing
DATA_BEGIN
LIST(Header)
ATOM_BEGIN
ATOM(Header_FileProperties)
ATOM(Header_StreamProperties)
LIST(Header_HeaderExtension)
ATOM_BEGIN
ATOM(Header_HeaderExtension_ExtendedStreamProperties)
ATOM(Header_HeaderExtension_AdvancedMutualExclusion)
ATOM(Header_HeaderExtension_GroupMutualExclusion)
ATOM(Header_HeaderExtension_StreamPrioritization)
ATOM(Header_HeaderExtension_BandwidthSharing)
ATOM(Header_HeaderExtension_LanguageList)
ATOM(Header_HeaderExtension_Metadata)
ATOM(Header_HeaderExtension_MetadataLibrary)
ATOM(Header_HeaderExtension_IndexParameters)
ATOM(Header_HeaderExtension_MediaIndexParameters)
ATOM(Header_HeaderExtension_TimecodeIndexParameters)
ATOM(Header_HeaderExtension_Compatibility)
ATOM(Header_HeaderExtension_AdvancedContentEncryption)
ATOM(Header_HeaderExtension_IndexPlaceholder)
ATOM(Header_Padding)
ATOM_END
ATOM(Header_CodecList)
ATOM(Header_ScriptCommand)
ATOM(Header_Marker)
ATOM(Header_BitRateMutualExclusion)
ATOM(Header_ErrorCorrection)
ATOM(Header_ContentDescription)
ATOM(Header_ExtendedContentDescription)
ATOM(Header_StreamBitRate)
ATOM(Header_ContentBranding)
ATOM(Header_ContentEncryption)
ATOM(Header_ExtendedContentEncryption)
ATOM(Header_DigitalSignature)
ATOM(Header_Padding)
ATOM_END
LIST(Data)
ATOM_DEFAULT_ALONE(Data_Packet)
LIST_SKIP(SimpleIndex)
LIST_SKIP(Index)
ATOM(MediaIndex)
ATOM(TimecodeIndex)
DATA_END
}
//***************************************************************************
// Elements
//***************************************************************************
//---------------------------------------------------------------------------
void File_Wm::Header()
{
Data_Accept("Windows Media");
Element_Name("Header");
//Parsing
Skip_L4( "Number of Header Objects");
Skip_L1( "Alignment");
Skip_L1( "Architecture");
FILLING_BEGIN();
Fill(Stream_General, 0, General_Format, "Windows Media");
Header_StreamProperties_StreamOrder=0;
FILLING_END();
}
//---------------------------------------------------------------------------
void File_Wm::Header_FileProperties()
{
Element_Name("File Properties");
//Parsing
int64u CreationDate, PlayDuration, SendDuration, Preroll;
int32u Flags, MaximumBitRate;
Skip_GUID( "File ID");
Skip_L8( "File Size");
Get_L8 (CreationDate, "Creation Date"); Param_Info1(Ztring().Date_From_Milliseconds_1601(CreationDate/10000));
Skip_L8( "Data Packets Count");
Get_L8 (PlayDuration, "Play Duration"); Param_Info_From_Milliseconds(PlayDuration/10000);
Get_L8 (SendDuration, "Send Duration"); Param_Info_From_Milliseconds(SendDuration/10000);
Get_L8 (Preroll, "Preroll"); Param_Info_From_Milliseconds(Preroll);
Get_L4 (Flags, "Flags");
Skip_Flags(Flags, 0, "Broadcast");
Skip_Flags(Flags, 1, "Seekable");
Skip_Flags(Flags, 2, "Use Packet Template");
Skip_Flags(Flags, 3, "Live");
Skip_Flags(Flags, 4, "Recordable");
Skip_Flags(Flags, 5, "Unknown Data Size");
Skip_L4( "Minimum Data Packet Size");
Get_L4 (MaximumDataPacketSize, "Maximum Data Packet Size");
Get_L4 (MaximumBitRate, "Maximum Bitrate");
//Filling
if (MaximumBitRate)
Fill(Stream_General, 0, General_OverallBitRate_Maximum, MaximumBitRate);
Ztring Encoded_Date_New=Ztring().Date_From_Seconds_1601(CreationDate/10000000);
const Ztring& Encoded_Date_Old=Retrieve_Const(Stream_General, 0, General_Encoded_Date);
if (Encoded_Date_Old.empty() || Encoded_Date_New!=Encoded_Date_Old)
Fill(Stream_General, 0, General_Encoded_Date, Encoded_Date_New);
if (PlayDuration/1000>Preroll)
Fill(Stream_General, 0, General_Duration, PlayDuration/10000-Preroll);
FileProperties_Preroll=(int32u)(Preroll);
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties ()
{
Element_Name("Stream Properties");
//Parsing
int128u StreamType;
int32u StreamTypeLength, ErrorCorrectionTypeLength;
Get_GUID(StreamType, "StreamType"); Param_Info1(Wm_StreamType(StreamType)); Element_Info1(Wm_StreamType(StreamType));
Skip_GUID( "Error Correction Type");
Skip_L8( "Time Offset");
Get_L4 (StreamTypeLength, "Type-Specific Data Length");
Get_L4 (ErrorCorrectionTypeLength, "Error Correction Data Length");
Get_L2 (Stream_Number, "Stream Number");
if (Stream_Number&0x8000)
{
Param_Info1("Encrypted Content");
Stream[Stream_Number&0x007F].Info["Encryption"]=__T("Encrypted");
}
Stream_Number&=0x007F; //Only 7bits
Element_Info1(Stream_Number);
Skip_L4( "Reserved");
switch (StreamType.hi)
{
case Elements::Header_StreamProperties_Audio : Element_Begin0(); //size is StreamTypeLength
Header_StreamProperties_Audio();
Element_End0(); break;
case Elements::Header_StreamProperties_Video : Element_Begin0(); //size is StreamTypeLength
Header_StreamProperties_Video();
Element_End0(); break;
case Elements::Header_StreamProperties_JFIF : Element_Begin0(); //size is StreamTypeLength
Header_StreamProperties_JFIF();
Element_End0(); break;
case Elements::Header_StreamProperties_DegradableJPEG : Element_Begin0(); //size is StreamTypeLength
Header_StreamProperties_DegradableJPEG();
Element_End0(); break;
case Elements::Header_StreamProperties_FileTransfer :
case Elements::Header_StreamProperties_Binary : Element_Begin0(); //size is StreamTypeLength
Header_StreamProperties_Binary();
StreamKind_Last=Stream_Max; StreamPos_Last=(size_t)-1;
Element_End0(); break;
default : if (StreamTypeLength>0)
Skip_XX(StreamTypeLength, "Type-Specific Data");
StreamKind_Last=Stream_Max; StreamPos_Last=(size_t)-1;
}
if (ErrorCorrectionTypeLength)
Skip_XX(ErrorCorrectionTypeLength, "Error Correction Data");
//Filling
stream& StreamItem = Stream[Stream_Number];
StreamItem.StreamKind=StreamKind_Last;
StreamItem.StreamPos=StreamPos_Last;
StreamItem.Info["ID"].From_Number(Stream_Number);
StreamItem.Info["StreamOrder"].From_Number(Header_StreamProperties_StreamOrder);
Header_StreamProperties_StreamOrder++;
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_Audio ()
{
Element_Name("Audio");
//Parsing
int32u SamplingRate, BytesPerSec;
int16u CodecID, Channels, Data_Size, Resolution;
Get_L2 (CodecID, "Codec ID");
Get_L2 (Channels, "Number of Channels");
Get_L4 (SamplingRate, "Samples Per Second");
Get_L4 (BytesPerSec, "Average Number of Bytes Per Second");
Skip_L2( "Block Alignment");
Get_L2 (Resolution, "Bits / Sample");
Get_L2 (Data_Size, "Codec Specific Data Size");
//Filling
Stream_Prepare(Stream_Audio);
Stream[Stream_Number].IsCreated=true;
Ztring Codec; Codec.From_Number(CodecID, 16);
Codec.MakeUpperCase();
CodecID_Fill(Codec, Stream_Audio, StreamPos_Last, InfoCodecID_Format_Riff);
Fill(Stream_Audio, StreamPos_Last, Audio_Codec, Codec); //May be replaced by codec parser
Fill(Stream_Audio, StreamPos_Last, Audio_Codec_CC, Codec);
if (Channels)
Fill(Stream_Audio, StreamPos_Last, Audio_Channel_s_, Channels);
if (SamplingRate)
Fill(Stream_Audio, StreamPos_Last, Audio_SamplingRate, SamplingRate);
if (BytesPerSec)
Fill(Stream_Audio, StreamPos_Last, Audio_BitRate, BytesPerSec*8);
if (Resolution)
Fill(Stream_Audio, StreamPos_Last, Audio_BitDepth, Resolution);
FILLING_BEGIN();
//Creating the parser
if (0);
#if defined(MEDIAINFO_MPEGA_YES)
else if (MediaInfoLib::Config.CodecID_Get(Stream_Audio, InfoCodecID_Format_Riff, Ztring::ToZtring(CodecID, 16))==__T("MPEG Audio"))
{
stream& StreamItem = Stream[Stream_Number];
File_Mpega* Parser = new File_Mpega;
StreamItem.Parser= Parser;
Parser->Frame_Count_Valid=8;
StreamItem.Parser->ShouldContinueParsing=true;
}
#endif
Open_Buffer_Init(Stream[Stream_Number].Parser);
FILLING_END();
//Parsing
if (Data_Size>0)
{
Element_Begin1("Codec Specific Data");
switch (CodecID)
{
case 0x0161 :
case 0x0162 :
case 0x0163 : Header_StreamProperties_Audio_WMA(); break;
case 0x7A21 :
case 0x7A22 : Header_StreamProperties_Audio_AMR(); break;
default : Skip_XX(Data_Size, "Unknown");
}
Element_End0();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_Audio_WMA ()
{
Element_Info1("WMA");
//Demux
#if MEDIAINFO_DEMUX
switch (Config->Demux_InitData_Get())
{
case 0 : //In demux event
Demux_Level=2; //Container
Demux(Buffer+Buffer_Offset, (size_t)Element_Size, ContentType_Header);
break;
case 1 : //In field
{
std::string Data_Raw((const char*)(Buffer+Buffer_Offset+Element_Offset), (size_t)10);//Element_Size-(Element_Offset));
std::string Data_Base64(Base64::encode(Data_Raw));
Fill(Stream_Audio, StreamPos_Last, "Demux_InitBytes", Data_Base64);
Fill_SetOptions(Stream_Audio, StreamPos_Last, "Demux_InitBytes", "N NT");
}
break;
default : ;
}
#endif //MEDIAINFO_DEMUX
//Parsing
Skip_L4( "SamplesPerBlock");
Skip_L2( "EncodeOptions");
Skip_L4( "SuperBlockAlign");
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_Audio_AMR ()
{
Element_Info1("AMR");
//Parsing
int32u Flags;
bool VBR;
Get_L4 (Flags, "Flags");
Skip_Flags(Flags, 0, "SID is used");
Get_Flags (Flags, 1, VBR, "Varying bitrate");
//Filling
Fill(Stream_Audio, StreamPos_Last, Audio_BitRate_Mode, VBR?"VBR":"CBR");
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_Video ()
{
Element_Name("Video");
//Parsing
int32u Width, Height, Compression;
int16u Data_Size, Resolution;
Get_L4 (Width, "Width");
Get_L4 (Height, "Height");
Skip_L1( "Flags");
Get_L2 (Data_Size, "Format Data Size");
Skip_L4( "Size");
Get_L4 (Width, "Width");
Get_L4 (Height, "Height");
Skip_L2( "Planes");
Get_L2 (Resolution, "BitCount");
Get_C4 (Compression, "Compression");
Skip_L4( "SizeImage");
Skip_L4( "XPelsPerMeter");
Skip_L4( "YPelsPerMeter");
Skip_L4( "ClrUsed");
Skip_L4( "ClrImportant");
//Filling
Stream_Prepare(Stream_Video);
Stream[Stream_Number].IsCreated=true;
CodecID_Fill(Ztring().From_CC4(Compression), Stream_Video, StreamPos_Last, InfoCodecID_Format_Riff);
Fill(Stream_Video, StreamPos_Last, Video_Codec, Ztring().From_CC4(Compression)); //May be replaced by codec parser
Fill(Stream_Video, StreamPos_Last, Video_Codec_CC, Ztring().From_CC4(Compression));
Fill(Stream_Video, StreamPos_Last, Video_Width, Width);
Fill(Stream_Video, StreamPos_Last, Video_Height, Height);
if (Resolution>0)
Fill(Stream_Video, StreamPos_Last, Video_BitDepth, (Resolution%3)?Resolution:(Resolution/3)); //If not a multiple of 3, the total resolution is filled
if (Compression==CC4("DVR "))
IsDvrMs=true;
//From Content description (we imagine that data is for all video streams...)
if (Header_ExtendedContentDescription_AspectRatioX && Header_ExtendedContentDescription_AspectRatioY)
{
if (Header_ExtendedContentDescription_AspectRatioX==16 && Header_ExtendedContentDescription_AspectRatioY==9)
Fill(Stream_Video, StreamPos_Last, Video_DisplayAspectRatio, ((float32)16)/9, 3);
else if (Header_ExtendedContentDescription_AspectRatioX==4 && Header_ExtendedContentDescription_AspectRatioY==3)
Fill(Stream_Video, StreamPos_Last, Video_DisplayAspectRatio, ((float32)4)/3, 3);
else
Fill(Stream_Video, StreamPos_Last, Video_PixelAspectRatio, ((float32)Header_ExtendedContentDescription_AspectRatioX)/Header_ExtendedContentDescription_AspectRatioY, 3, true);
}
//Creating the parser
if (0);
#if defined(MEDIAINFO_VC1_YES)
else if (MediaInfoLib::Config.CodecID_Get(Stream_Video, InfoCodecID_Format_Riff, Ztring().From_CC4(Compression), InfoCodecID_Format)==__T("VC-1"))
{
stream& StreamItem = Stream[Stream_Number];
File_Vc1* Parser = new File_Vc1;
StreamItem.Parser= Parser;
if (Compression==CC4("WMV3"))
{
Parser->From_WMV3=true;
Parser->MustSynchronize=false;
}
Parser->FrameIsAlwaysComplete=true; //Warning: this is not always the case, see data parsing
Open_Buffer_Init(StreamItem.Parser);
if (Data_Size>40)
{
//Demux
#if MEDIAINFO_DEMUX
switch (Config->Demux_InitData_Get())
{
case 0 : //In demux event
Element_Code=Stream_Number;
Demux_Level=2; //Container
Demux(Buffer+(size_t)Element_Offset, (size_t)(Data_Size-40), ContentType_Header);
break;
case 1 : //In field
{
std::string Data_Raw((const char*)(Buffer+(size_t)Element_Offset), (size_t)(Data_Size-40));
std::string Data_Base64(Base64::encode(Data_Raw));
Fill(Stream_Video, StreamPos_Last, "Demux_InitBytes", Data_Base64);
Fill_SetOptions(Stream_Video, StreamPos_Last, "Demux_InitBytes", "N NT");
}
break;
default : ;
}
#endif //MEDIAINFO_DEMUX
stream& StreamItem = Stream[Stream_Number];
Open_Buffer_Continue(StreamItem.Parser, (size_t)(Data_Size-40));
if (StreamItem.Parser->Status[IsFinished])
{
Finish(StreamItem.Parser);
Merge(*StreamItem.Parser, Stream_Video, 0, StreamPos_Last);
delete StreamItem.Parser; StreamItem.Parser=NULL;
}
else
{
((File_Vc1*)StreamItem.Parser)->Only_0D=true;
((File_Vc1*)StreamItem.Parser)->MustSynchronize=false;
}
}
}
#endif
#if defined(MEDIAINFO_MPEGV_YES)
else if (MediaInfoLib::Config.Codec_Get(Ztring().From_CC4(Compression), InfoCodec_KindofCodec).find(__T("MPEG-2"))==0)
{
stream& StreamItem = Stream[Stream_Number];
File_Mpegv* Parser = new File_Mpegv;
StreamItem.Parser = Parser;
Parser->Frame_Count_Valid=30; //For searching Pulldown
Open_Buffer_Init(StreamItem.Parser);
}
#endif
else if (Data_Size>40) //TODO: see "The Mummy_e"
Skip_XX(Data_Size-40, "Codec Specific Data");
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_JFIF ()
{
Element_Name("JFIF");
//Parsing
int32u Width, Height;
Get_L4 (Width, "Width");
Get_L4 (Height, "Height");
Skip_L4( "Reserved");
//Filling
Stream_Prepare(Stream_Image);
Fill(Stream_Video, StreamPos_Last, Video_Format, "JPEG");
Fill(Stream_Video, StreamPos_Last, Video_Codec, "JPEG");
Fill(Stream_Video, StreamPos_Last, Video_Width, Width);
Fill(Stream_Video, StreamPos_Last, Video_Height, Height);
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_DegradableJPEG ()
{
Element_Name("Degradable JPEG");
int32u Width, Height;
int16u InterchangeDataLength;
Get_L4 (Width, "Width");
Get_L4 (Height, "Height");
Skip_L2( "Reserved");
Skip_L2( "Reserved");
Skip_L2( "Reserved");
Get_L2 (InterchangeDataLength, "Interchange data length");
if (InterchangeDataLength>0)
Skip_XX(InterchangeDataLength, "Interchange data");
else
Skip_L1( "Zero");
//Filling
Stream_Prepare(Stream_Image);
Fill(Stream_Video, StreamPos_Last, Video_Format, "JPEG");
Fill(Stream_Video, StreamPos_Last, Video_Codec, "JPEG");
Fill(Stream_Video, StreamPos_Last, Video_Width, Width);
Fill(Stream_Video, StreamPos_Last, Video_Height, Height);
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamProperties_Binary ()
{
Element_Name("Binary");
//Parsing
int32u FormatDataLength;
Skip_GUID( "Major media type");
Skip_GUID( "Media subtype");
Skip_L4( "Fixed-size samples");
Skip_L4( "Temporal compression");
Skip_L4( "Sample size");
Skip_GUID( "Format type");
Get_L4 (FormatDataLength, "Format data size");
if (FormatDataLength>0)
Skip_XX(FormatDataLength, "Format data");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension()
{
Element_Name("Header Extension");
//Parsing
int32u Size;
Skip_GUID( "ClockType");
Skip_L2( "ClockSize");
Get_L4 (Size, "Extension Data Size");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_ExtendedStreamProperties()
{
Element_Name("Extended Stream Properties");
//Parsing
int64u AverageTimePerFrame;
int32u DataBitrate, Flags;
int16u StreamNumber, LanguageID, StreamNameCount, PayloadExtensionSystemCount;
Info_L8(StartTime, "Start Time"); Param_Info_From_Milliseconds(StartTime);
Info_L8(EndTime, "End Time"); Param_Info_From_Milliseconds(EndTime);
Get_L4 (DataBitrate, "Data Bitrate");
Skip_L4( "Buffer Size");
Skip_L4( "Initial Buffer Fullness");
Skip_L4( "Alternate Data Bitrate");
Skip_L4( "Alternate Buffer Size");
Skip_L4( "Alternate Initial Buffer Fullness");
Skip_L4( "Maximum Object Size");
Get_L4 (Flags, "Flags");
Skip_Flags(Flags, 0, "Reliable");
Skip_Flags(Flags, 1, "Seekable");
Skip_Flags(Flags, 2, "No Cleanpoints");
Skip_Flags(Flags, 3, "Resend Live Cleanpoints");
Get_L2 (StreamNumber, "Stream Number"); Element_Info1(StreamNumber);
Get_L2 (LanguageID, "Stream Language ID Index");
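//Note: Average Time Per Frame is expressed in 100-nanosecond units, as are most ASF durations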
Get_L8 (AverageTimePerFrame, "Average Time Per Frame");
Get_L2 (StreamNameCount, "Stream Name Count");
Get_L2 (PayloadExtensionSystemCount, "Payload Extension System Count");
for (int16u Pos=0; Pos<StreamNameCount; Pos++)
{
Element_Begin1("Stream Name");
int16u StreamNameLength;
Skip_L2( "Language ID Index");
Get_L2 (StreamNameLength, "Stream Name Length");
Skip_UTF16L(StreamNameLength, "Stream Name");
Element_End0();
}
for (int16u Pos=0; Pos<PayloadExtensionSystemCount; Pos++)
{
Element_Begin1("Payload Extension System");
stream::payload_extension_system Payload_Extension_System;
int32u ExtensionSystemInfoLength;
Get_GUID(Payload_Extension_System.ID, "Extension System ID");
Get_L2 (Payload_Extension_System.Size, "Extension Data Size");
Get_L4 (ExtensionSystemInfoLength, "Extension System Info Length");
if (ExtensionSystemInfoLength>0)
Skip_XX(ExtensionSystemInfoLength, "Extension System Info");
Element_End0();
//Filling
Stream[StreamNumber].Payload_Extension_Systems.push_back(Payload_Extension_System);
}
//Header_StreamProperties
if (Element_Offset<Element_Size)
{
//This could be everything, but in theory this is only Header_StreamProperties
int128u Name;
int64u Size;
Element_Begin1("Stream Properties Object");
Element_Begin1("Header");
Get_GUID(Name, "Name");
Get_L8 (Size, "Size");
Element_End0();
if (Size>=24 && Element_Offset+Size-24==Element_Size)
{
switch (Name.hi)
{
case Elements::Header_StreamProperties : Header_StreamProperties(); break;
default : Skip_XX(Size-24, "Unknown");
}
}
else
Skip_XX(Element_Size-Element_Offset, "Problem");
Element_End0();
}
//Filling
stream& StreamItem = Stream[StreamNumber];
StreamItem.LanguageID=LanguageID;
StreamItem.AverageBitRate=DataBitrate;
StreamItem.AverageTimePerFrame=AverageTimePerFrame;
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_AdvancedMutualExclusion()
{
Element_Name("Advanced Mutual Exclusion");
//Parsing
int16u Count;
Info_GUID(ExclusionType, "Exclusion Type"); Param_Info1(Wm_ExclusionType(ExclusionType));
Get_L2 (Count, "Stream Numbers Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Info_L2(StreamNumber, "Stream Number"); Element_Info1(StreamNumber);
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_GroupMutualExclusion()
{
Element_Name("Group Mutual Exclusion");
//Parsing
Skip_XX(Element_Size, "Unknown");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_StreamPrioritization()
{
Element_Name("Stream Prioritization");
//Parsing
int16u Count;
Get_L2 (Count, "Stream Numbers Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
int16u Flags;
Element_Begin1("Stream");
Info_L2(StreamNumber, "Stream Number"); Element_Info1(StreamNumber);
Get_L2 (Flags, "Flags");
Skip_Flags(Flags, 0, "Mandatory");
Element_End0();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_BandwidthSharing()
{
Element_Name("Bandwidth Sharing");
//Parsing
Skip_XX(Element_Size, "Unknown");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_LanguageList()
{
Element_Name("Language List");
//Parsing
Ztring LanguageID;
int16u Count;
int8u LanguageID_Length;
Get_L2 (Count, "Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Language ID");
Get_L1 (LanguageID_Length, "Language ID Length");
if (LanguageID_Length>0)
{
Get_UTF16L(LanguageID_Length, LanguageID, "Language ID");
Element_Info1(LanguageID);
}
Element_End0();
//Filling
Languages.push_back(LanguageID);
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_MetadataLibrary()
{
Element_Name("Metadata Library");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_Metadata()
{
Element_Name("Metadata");
//Parsing
float32 AspectRatioX=0, AspectRatioY=0;
int16u Count;
Get_L2 (Count, "Description Records Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Description Record");
Ztring Name, Data;
int64u Data_Int64=0;
int32u Data_Length;
int16u StreamNumber, Name_Length, Data_Type;
Skip_L2( "Reserved");
Get_L2 (StreamNumber, "Stream Number");
Get_L2 (Name_Length, "Name Length");
Get_L2 (Data_Type, "Data Type");
Get_L4 (Data_Length, "Data Length");
Get_UTF16L(Name_Length, Name, "Name");
switch (Data_Type)
{
case 0x00 : Get_UTF16L(Data_Length, Data, "Data"); break;
case 0x01 : Skip_XX(Data_Length, "Data"); Data=__T("(Binary)"); break;
case 0x02 : {int16u Data_Int; Get_L2 (Data_Int, "Data"); Data=(Data_Int==0)?__T("No"):__T("Yes"); Data_Int64=Data_Int;} break;
case 0x03 : {int32u Data_Int; Get_L4 (Data_Int, "Data"); Data.From_Number(Data_Int); Data_Int64=Data_Int;} break;
case 0x04 : {int64u Data_Int; Get_L8 (Data_Int, "Data"); Data.From_Number(Data_Int); Data_Int64=Data_Int;} break;
case 0x05 : {int16u Data_Int; Get_L2 (Data_Int, "Data"); Data.From_Number(Data_Int); Data_Int64=Data_Int;} break;
default : Skip_XX(Data_Length, "Data"); Data=__T("(Unknown)"); break;
}
Element_Info1(Name);
Element_Info1(Data);
Element_End0();
if (Name==__T("IsVBR"))
Stream[StreamNumber].Info["BitRate_Mode"]=(Data_Int64==0)?"CBR":"VBR";
else if (Name==__T("AspectRatioX"))
{
AspectRatioX=Data.To_float32();
if (AspectRatioX && AspectRatioY)
Stream[StreamNumber].Info["PixelAspectRatio"].From_Number(AspectRatioX/AspectRatioY, 3);
}
else if (Name==__T("AspectRatioY"))
{
AspectRatioY=Data.To_float32();
if (AspectRatioX && AspectRatioY)
Stream[StreamNumber].Info["PixelAspectRatio"].From_Number(AspectRatioX/AspectRatioY, 3);
}
else if (Name==__T("DeviceConformanceTemplate"))
{
if (Data!=__T("@") && Data.find(__T('@'))!=std::string::npos)
Stream[StreamNumber].Info["Format_Profile"]=Data;
}
else if (Name==__T("WM/WMADRCPeakReference")) {}
else if (Name==__T("WM/WMADRCAverageReference")) {}
else if (Name==__T("WM/WMADRCAverageTarget")) {}
else if (Name==__T("WM/WMADRCPeakTarget")) {}
else
Stream[StreamNumber].Info[Name.To_Local()]=Data;
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_IndexParameters()
{
Element_Name("Index Parameters");
//Parsing
int16u Count;
Skip_L4( "Index Entry Time Interval");
Get_L2 (Count, "Index Specifiers Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Index Specifier");
int16u IndexType;
Skip_L2( "Stream Number");
Get_L2 (IndexType, "Index Type");
Element_Info1(IndexType);
Element_End0();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_MediaIndexParameters()
{
Header_HeaderExtension_IndexParameters();
Element_Name("MediaIndex Parameters");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_TimecodeIndexParameters()
{
Header_HeaderExtension_IndexParameters();
Element_Name("Timecode Index Parameters");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_Compatibility()
{
Element_Name("Compatibility");
//Parsing
Skip_L1( "Profile");
Skip_L1( "Mode");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_AdvancedContentEncryption()
{
Element_Name("Advanced Content Encryption");
}
//---------------------------------------------------------------------------
void File_Wm::Header_HeaderExtension_IndexPlaceholder()
{
Element_Name("Index Placeholder");
}
//---------------------------------------------------------------------------
void File_Wm::Header_CodecList()
{
Element_Name("Codec List");
//Parsing
Ztring CodecName, CodecDescription;
int32u Count32;
int16u Count, Type, CodecNameLength, CodecDescriptionLength, CodecInformationLength;
Skip_GUID( "Reserved");
Get_L4 (Count32, "Codec Entries Count");
Count=(int16u)Count32;
CodecInfos.resize(Count);
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Codec Entry");
Get_L2 (Type, "Type"); Param_Info1(Wm_CodecList_Kind(Type));
Get_L2 (CodecNameLength, "Codec Name Length");
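//Name/description lengths are counted in UTF-16 characters, hence the *2 when reading bytes below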
Get_UTF16L(CodecNameLength*2, CodecName, "Codec Name");
Get_L2 (CodecDescriptionLength, "Codec Description Length");
Get_UTF16L(CodecDescriptionLength*2, CodecDescription, "Codec Description");
Get_L2 (CodecInformationLength, "Codec Information Length");
if (Type==2 && CodecInformationLength==2) //Audio and 2CC
Skip_L2( "2CC"); //Not used, we have it elsewhere
else if (Type==1 && CodecInformationLength==4) //Video and 4CC
Skip_C4( "4CC"); //Not used, we have it elsewhere
else
Skip_XX(CodecInformationLength, "Codec Information");
Element_End0();
FILLING_BEGIN();
CodecInfos[Pos].Type=Type;
CodecInfos[Pos].Info=CodecName;
if (!CodecDescription.empty())
{
CodecInfos[Pos].Info+=__T(" - ");
CodecInfos[Pos].Info+=CodecDescription;
}
Codec_Description_Count++;
FILLING_END();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_ScriptCommand()
{
Element_Name("Script Command");
//Parsing
Skip_GUID( "Reserved");
int16u Commands_Count, CommandTypes_Count;
Get_L2 (Commands_Count, "Commands Count");
Get_L2 (CommandTypes_Count, "Command Types Count");
for (int16u Pos=0; Pos<CommandTypes_Count; Pos++)
{
Element_Begin1("Command Type");
int16u Length;
Get_L2 (Length, "Command Type Length");
if (Length>0)
Skip_UTF16L(Length*2, "Command Type");
Element_End0();
}
for (int16u Pos=0; Pos<Commands_Count; Pos++)
{
Element_Begin1("Command");
int16u Length;
Skip_L2( "Type Index");
Get_L2 (Length, "Command Length");
if (Length>0)
Skip_UTF16L(Length*2, "Command");
Element_End0();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_Marker()
{
Element_Name("Markers");
//Parsing
Skip_GUID( "Reserved");
int32u Markers_Count;
int16u Name_Length;
Get_L4 (Markers_Count, "Markers Count");
Skip_L2( "Reserved");
Get_L2 (Name_Length, "Name Length");
if (Name_Length>0)
Skip_UTF16L(Name_Length, "Name");
//Filling
if (Markers_Count>0)
Stream_Prepare(Stream_Menu);
//Parsing
for (int32u Pos=0; Pos<Markers_Count; Pos++)
{
Element_Begin1("Marker");
Ztring Marker;
int32u Marker_Length;
Skip_L8( "Offset");
Info_L8(PresentationTime, "Presentation Time"); Param_Info_From_Milliseconds(PresentationTime/10000);
Skip_L2( "Entry Length");
Info_L4(SendTime, "Send Time"); Param_Info_From_Milliseconds(SendTime);
Skip_L4( "Flags");
Get_L4 (Marker_Length, "Marker Description Length");
if (Marker_Length>0)
Get_UTF16L(Marker_Length*2, Marker, "Marker Description");
Element_End0();
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_BitRateMutualExclusion()
{
Element_Name("BitRate Mutual Exclusion");
//Parsing
int16u Count;
Skip_GUID( "Exclusion Type");
Get_L2 (Count, "Stream Numbers Count");
for (int16u Pos=0; Pos<Count; Pos++)
Skip_L2( "Stream Number");
}
//---------------------------------------------------------------------------
void File_Wm::Header_ErrorCorrection()
{
Element_Name("Error Correction");
}
//---------------------------------------------------------------------------
void File_Wm::Header_ContentDescription()
{
Element_Name("Content Description");
//Parsing
Ztring Title, Author, Copyright, Description, Rating;
int16u TitleLength, AuthorLength, CopyrightLength, DescriptionLength, RatingLength;
Get_L2 (TitleLength, "TitleLength");
Get_L2 (AuthorLength, "AuthorLength");
Get_L2 (CopyrightLength, "CopyrightLength");
Get_L2 (DescriptionLength, "DescriptionLength");
Get_L2 (RatingLength, "RatingLength");
if (TitleLength>0)
Get_UTF16L(TitleLength, Title, "Title");
if (AuthorLength>0)
Get_UTF16L(AuthorLength, Author, "Author");
if (CopyrightLength>0)
Get_UTF16L(CopyrightLength, Copyright, "Copyright");
if (DescriptionLength>0)
Get_UTF16L(DescriptionLength, Description, "Description");
if (RatingLength>0)
Get_UTF16L(RatingLength, Rating, "Rating");
//Filling
Fill(Stream_General, 0, General_Title, Title);
Fill(Stream_General, 0, General_Performer, Author);
Fill(Stream_General, 0, General_Copyright, Copyright);
Fill(Stream_General, 0, General_Comment, Description);
Fill(Stream_General, 0, General_Rating, Rating);
}
//---------------------------------------------------------------------------
void File_Wm::Header_ExtendedContentDescription()
{
Element_Name("Extended Content Description");
//Parsing
int16u Count;
Get_L2 (Count, "Content Descriptors Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Content Descriptor");
Ztring Name, Value;
int64u Value_Int64=0;
int16u Name_Length, Value_Type, Value_Length;
Get_L2 (Name_Length, "Name Length");
Get_UTF16L(Name_Length, Name, "Name");
Get_L2 (Value_Type, "Value Data Type");
Get_L2 (Value_Length, "Value Length");
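//Value Data Type per the ASF spec: 0=UTF-16 string, 1=byte array, 2=bool (32-bit here), 3=int32u, 4=int64u, 5=int16u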
switch (Value_Type)
{
case 0x00 : Get_UTF16L(Value_Length, Value, "Value"); break;
case 0x01 :
if (Name==__T("ASFLeakyBucketPairs")) Header_ExtendedContentDescription_ASFLeakyBucketPairs(Value_Length);
else {Skip_XX(Value_Length, "Value"); Value=__T("(Binary)");} break;
case 0x02 : {int32u Value_Int; Get_L4 (Value_Int, "Value"); Value=(Value_Int==0)?__T("No"):__T("Yes"); Value_Int64=Value_Int;} break;
case 0x03 : {int32u Value_Int; Get_L4 (Value_Int, "Value"); Value.From_Number(Value_Int); Value_Int64=Value_Int;} break;
case 0x04 : {int64u Value_Int; Get_L8 (Value_Int, "Value"); Value.From_Number(Value_Int); Value_Int64=Value_Int;} break;
case 0x05 : {int16u Value_Int; Get_L2 (Value_Int, "Value"); Value.From_Number(Value_Int); Value_Int64=Value_Int;} break;
default : Skip_XX(Value_Length, "Value"); Value=__T("(Unknown)"); break;
}
Element_Info1(Name);
Element_Info1(Value);
Element_End0();
//Filling
if (!Value.empty())
{
if (Name==__T("Agility FPS")) {}
else if (Name==__T("ASFLeakyBucketPairs")) {} //Already done elsewhere
else if (Name==__T("AspectRatioX")) Header_ExtendedContentDescription_AspectRatioX=Value_Int64;
else if (Name==__T("AspectRatioY")) Header_ExtendedContentDescription_AspectRatioY=Value_Int64;
else if (Name==__T("Buffer Average")) {}
else if (Name==__T("DVR Index Granularity")) {}
else if (Name==__T("DVR File Version")) {}
else if (Name==__T("IsVBR"))
Fill(Stream_General, 0, General_OverallBitRate_Mode, Value_Int64==0?"CBR":"VBR");
else if (Name==__T("VBR Peak")) {} //Already in "Stream Bitrate" chunk
else if (Name==__T("WMFSDKVersion")) {}
else if (Name==__T("WMFSDKNeeded")) {}
else if (Name==__T("WM/AlbumTitle"))
Fill(Stream_General, 0, General_Album, Value);
else if (Name==__T("WM/AlbumArtist"))
{
const Ztring& Previous=Retrieve(Stream_General, 0, General_Performer);
if (!Previous.empty() && Previous != Value)
Fill(Stream_General, 0, General_Accompaniment, Previous); // Microsoft "Contributing artists"
Fill(Stream_General, 0, General_Performer, Value, true);
}
else if (Name==__T("WM/ArtistSortOrder"))
Fill(Stream_General, 0, General_Performer_Sort, Value);
else if (Name==__T("WM/AuthorURL"))
Fill(Stream_General, 0, "Author/Url", Value);
else if (Name==__T("WM/BeatsPerMinute"))
Fill(Stream_General, 0, General_BPM, Value);
else if (Name==__T("WM/Binary"))
Fill(Stream_General, 0, General_Cover, "Y");
else if (Name==__T("WM/Comments"))
Fill(Stream_General, 0, General_Comment, Value, true); //Clear last value
else if (Name==__T("WM/Composer"))
Fill(Stream_General, 0, General_Composer, Value);
else if (Name==__T("WM/Conductor"))
Fill(Stream_General, 0, General_Conductor, Value);
else if (Name==__T("WM/EncodedBy"))
Fill(Stream_General, 0, General_EncodedBy, Value);
else if (Name==__T("WM/EncoderSettings"))
Fill(Stream_General, 0, General_Encoded_Library_Settings, Value);
else if (Name==__T("WM/EncodingTime"))
{
Ztring Encoded_Date_New=Ztring().Date_From_Seconds_1601(Value_Int64/10000000);
const Ztring& Encoded_Date_Old=Retrieve_Const(Stream_General, 0, General_Encoded_Date);
if (Encoded_Date_Old.empty() || Encoded_Date_New!=Encoded_Date_Old)
Fill(Stream_General, 0, General_Encoded_Date, Encoded_Date_New);
}
else if (Name==__T("WM/Genre"))
Fill(Stream_General, 0, General_Genre, Value, true); //Clear last value
else if (Name==__T("WM/GenreID"))
{
if (Retrieve(Stream_General, 0, General_Genre).empty())
Fill(Stream_General, 0, General_Genre, Value);
}
else if (Name==__T("WM/Language"))
Language_ForAll=Value;
else if (Name==__T("WM/MediaCredits"))
Fill(Stream_General, 0, General_ThanksTo, Value);
else if (Name==__T("WM/MediaPrimaryClassID")) {}
else if (Name==__T("WM/MCDI")) {}
else if (Name==__T("WM/ModifiedBy"))
Fill(Stream_General, 0, General_RemixedBy, Value);
else if (Name==__T("WM/OriginalAlbumTitle"))
Fill(Stream_General, 0, "Original/Album", Value);
else if (Name==__T("WM/OriginalReleaseTime"))
Fill(Stream_General, 0, "Original/Released_Date", Value);
else if (Name==__T("WM/ParentalRating"))
Fill(Stream_General, 0, General_LawRating, Value);
else if (Name==__T("WM/ParentalRatingReason"))
Fill(Stream_General, 0, General_LawRating_Reason, Value);
else if (Name==__T("WM/Picture"))
Fill(Stream_General, 0, General_Cover, "Y");
else if (Name==__T("WM/Provider"))
Fill(Stream_General, 0, "Provider", Value);
else if (Name==__T("WM/Publisher"))
Fill(Stream_General, 0, General_Publisher, Value);
else if (Name==__T("WM/RadioStationName"))
Fill(Stream_General, 0, General_ServiceName, Value);
else if (Name==__T("WM/RadioStationOwner"))
Fill(Stream_General, 0, General_ServiceProvider, Value);
else if (Name==__T("WM/SubTitle"))
Fill(Stream_General, 0, General_Title_More, Value);
else if (Name==__T("WM/SubTitleDescription"))
Fill(Stream_General, 0, General_Title_More, Value);
else if (Name==__T("WM/ToolName"))
Fill(Stream_General, 0, General_Encoded_Application, Value);
else if (Name==__T("WM/ToolVersion"))
Fill(Stream_General, 0, General_Encoded_Application, Retrieve(Stream_General, 0, General_Encoded_Application)+__T(" ")+Value, true);
else if (Name==__T("WM/TrackNumber"))
Fill(Stream_General, 0, General_Track_Position, Value, true); //Clear last value, like WM/Track
else if (Name==__T("WM/Track"))
{
if (Retrieve(Stream_General, 0, General_Track_Position).empty())
Fill(Stream_General, 0, General_Track_Position, Value.To_int32u()+1);
}
else if (Name==__T("WM/UniqueFileIdentifier"))
{
if (Value.empty() || Value[0]!=__T(';')) //Test if there is only the separator
{
Value.FindAndReplace(__T(";"), MediaInfoLib::Config.TagSeparator_Get());
Fill(Stream_General, 0, General_UniqueID, Value);
}
}
else if (Name==__T("WM/Writer"))
Fill(Stream_General, 0, General_WrittenBy, Value);
else if (Name==__T("WM/Year"))
Fill(Stream_General, 0, General_Recorded_Date, Value);
else
Fill(Stream_General, 0, Name.To_Local().c_str(), Value);
}
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_ExtendedContentDescription_ASFLeakyBucketPairs(int16u Value_Length)
{
Element_Begin1("ASFLeakyBucketPairs");
Skip_L2( "Reserved");
for (int16u Pos=2; Pos<Value_Length; Pos+=8)
{
Element_Begin1("Bucket");
Skip_L4( "BitRate");
Skip_L4( "msBufferWindow");
Element_End0();
}
Element_End0();
}
//---------------------------------------------------------------------------
void File_Wm::Header_StreamBitRate()
{
Element_Name("Stream Bitrate");
//Parsing
int16u Count;
Get_L2 (Count, "Count");
for (int16u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Stream");
int32u AverageBitRate;
int16u StreamNumber;
Get_L2 (StreamNumber, "Stream Number"); Element_Info1(StreamNumber);
Get_L4 (AverageBitRate, "Average Bitrate"); Element_Info1(AverageBitRate);
Element_End0();
//Filling
stream& StreamItem = Stream[StreamNumber];
if (StreamItem.AverageBitRate==0) //Prefer the average bitrate from Extended Stream Properties if present
StreamItem.AverageBitRate=AverageBitRate;
}
}
//---------------------------------------------------------------------------
void File_Wm::Header_ContentBranding()
{
Element_Name("Content Branding");
//Parsing
Ztring CopyrightURL, BannerImageURL;
int32u BannerImageData_Type, BannerImageData_Length, BannerImageURL_Length, CopyrightURL_Length;
Get_L4 (BannerImageData_Type, "Banner Image Data Type"); Param_Info1(Wm_BannerImageData_Type(BannerImageData_Type));
Get_L4 (BannerImageData_Length, "Banner Image Data Length");
if (BannerImageData_Length>0)
Skip_XX(BannerImageData_Length, "Banner Image Data");
Get_L4 (BannerImageURL_Length, "Banner Image URL Length");
if (BannerImageURL_Length>0)
Get_Local(BannerImageURL_Length, BannerImageURL, "Banner Image URL");
Get_L4 (CopyrightURL_Length, "Copyright URL Length");
if (CopyrightURL_Length>0)
Get_Local(CopyrightURL_Length, CopyrightURL, "Copyright URL");
}
//---------------------------------------------------------------------------
void File_Wm::Header_ContentEncryption()
{
Element_Name("Content Encryption");
//Parsing
Ztring LicenseURL;
int32u SecretDataLength, ProtectionTypeLength, KeyIDLength, LicenseURLLength;
Get_L4 (SecretDataLength, "Secret Data Length");
Skip_XX(SecretDataLength, "Secret Data");
Get_L4 (ProtectionTypeLength, "Protection Type Length");
Skip_Local(ProtectionTypeLength, "Protection Type");
Get_L4 (KeyIDLength, "Key ID Length");
Skip_Local(KeyIDLength, "Key ID Type");
Get_L4 (LicenseURLLength, "License URL Length");
Get_Local(LicenseURLLength, LicenseURL, "License URL");
//Filling
Fill(Stream_General, 0, "Encryption", LicenseURL);
}
//---------------------------------------------------------------------------
void File_Wm::Header_ExtendedContentEncryption()
{
Element_Name("Extended Content Encryption");
//Parsing
int32u DataLength;
Get_L4 (DataLength, "Data Length");
Skip_XX(DataLength, "Data");
}
//---------------------------------------------------------------------------
void File_Wm::Header_DigitalSignature()
{
Element_Name("Digital Signature");
//Parsing
int32u DataLength;
Skip_L4( "Signature Type");
Get_L4 (DataLength, "Signature Data Length");
Skip_XX(DataLength, "Signature Data");
}
//---------------------------------------------------------------------------
void File_Wm::Header_Padding()
{
Element_Name("Padding");
//Parsing
Skip_XX(Element_Size, "Padding");
}
//---------------------------------------------------------------------------
void File_Wm::Data()
{
Element_Name("Data");
//Parsing
Skip_GUID( "File ID");
Skip_L8( "Total Data Packets");
Skip_L1( "Alignment");
Skip_L1( "Packet Alignment");
//Filling
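//24 bytes = the already-parsed Data object header (16-byte GUID + 8-byte size)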
Fill(Stream_General, 0, General_HeaderSize, File_Offset+Buffer_Offset-24);
Fill(Stream_General, 0, General_DataSize, Element_TotalSize_Get()+24);
//For each stream
Streams_Count=0;
std::map<int16u, stream>::iterator Temp=Stream.begin();
while (Temp!=Stream.end())
{
#if defined(MEDIAINFO_MPEGA_YES)
if (IsDvrMs && !Temp->second.Parser && Temp->second.AverageBitRate>=32768)
{
Temp->second.Parser=new File_Mpega; //No stream properties, trying to detect it in the data...
((File_Mpega*)Temp->second.Parser)->Frame_Count_Valid=8;
Open_Buffer_Init(Temp->second.Parser);
}
#endif
if (Temp->second.Parser || Temp->second.StreamKind==Stream_Video) //We need Stream_Video for Frame_Rate computing
{
Temp->second.SearchingPayload=true;
Streams_Count++;
}
++Temp;
}
//Enabling the alternative parser
MustUseAlternativeParser=true;
Data_AfterTheDataChunk=File_Offset+Buffer_Offset+Element_TotalSize_Get();
}
//---------------------------------------------------------------------------
void File_Wm::Data_Packet()
{
//Counting
Packet_Count++;
Element_Info1(Packet_Count);
size_t Element_Show_Count=0;
//Parsing
int32u PacketLength=0, SizeOfMediaObject=0;
int8u Flags, ErrorCorrectionData_Length, ErrorCorrectionLengthType, SequenceType, PaddingLengthType, PacketLengthType;
bool ErrorCorrectionPresent;
Element_Begin1("Error Correction");
Get_L1 (Flags, "Flags");
Get_FlagsM(Flags&0x0F, ErrorCorrectionData_Length, "Error Correction Data Length"); //4 lowest bits
Skip_Flags(Flags, 4, "Opaque Data Present");
Get_FlagsM((Flags>>5)&0x03, ErrorCorrectionLengthType, "Error Correction Length Type"); //bits 6 and 7
Get_Flags (Flags, 7, ErrorCorrectionPresent, "Error Correction Present");
if (ErrorCorrectionPresent && ErrorCorrectionLengthType==0 && ErrorCorrectionData_Length==2)
{
int8u TypeNumber;
Get_L1 (TypeNumber, "Type/Number");
Skip_FlagsM((TypeNumber>>4)&0x0F, "Type");
Skip_FlagsM( TypeNumber &0x0F, "Number");
Skip_L1( "Cycle");
}
Element_End0();
Element_Begin1("Payload Parsing Information");
Get_L1 (Flags, "Length Type Flags");
Get_Flags (Flags, 0, MultiplePayloadsPresent, "Multiple Payloads Present");
Get_FlagsM((Flags>>1)&0x3, SequenceType, "Sequence Type");
Get_FlagsM((Flags>>3)&0x3, PaddingLengthType, "Padding Length Type");
Get_FlagsM((Flags>>5)&0x3, PacketLengthType, "Packet Length Type");
Skip_Flags(Flags, 7, "Error Correction Present");
Get_L1 (Flags, "Property Flags");
Get_FlagsM( Flags &0x3, ReplicatedDataLengthType, "Replicated Data Length Type");
Get_FlagsM((Flags>>2)&0x3, OffsetIntoMediaObjectLengthType, "Offset Into Media Object Length Type");
Get_FlagsM((Flags>>4)&0x3, MediaObjectNumberLengthType, "Media Object Number Length Type");
Get_FlagsM((Flags>>6)&0x3, StreamNumberLengthType, "Stream Number Length Type");
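//2-bit length types: 0=field not present, 1=int8u, 2=int16u, 3=int32u (used by the switches below)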
switch (PacketLengthType)
{
case 1 : {int8u Data; Get_L1(Data, "Packet Length"); PacketLength=Data;} break;
case 2 : {int16u Data; Get_L2(Data, "Packet Length"); PacketLength=Data;} break;
case 3 : Get_L4(PacketLength, "Packet Length"); break;
default: ;
}
switch (SequenceType)
{
case 1 : Skip_L1( "Sequence"); break;
case 2 : Skip_L2( "Sequence"); break;
case 3 : Skip_L4( "Sequence"); break;
default: ;
}
switch (PaddingLengthType)
{
case 1 : {int8u Data; Get_L1(Data, "Padding Length"); Data_Parse_Padding=Data;} break;
case 2 : {int16u Data; Get_L2(Data, "Padding Length"); Data_Parse_Padding=Data;} break;
case 3 : Get_L4(Data_Parse_Padding, "Padding Length"); break;
default: Data_Parse_Padding=0;
}
Skip_L4( "Send Time");
Skip_L2( "Duration");
Element_End0();
if (MultiplePayloadsPresent)
{
//Parsing
Element_Begin1("Multiple Payloads additional flags");
int8u AdditionalFlags;
Get_L1 (AdditionalFlags, "Flags");
Get_FlagsM( AdditionalFlags &0x3F, NumberPayloads, "Number of Payloads"); //6 bits
Get_FlagsM((AdditionalFlags>>6)&0x03, PayloadLengthType, "Payload Length Type"); //bits 6 and 7
Element_End0();
}
else
{
SizeOfMediaObject=(int32u)(Element_Size-Element_Offset-Data_Parse_Padding);
NumberPayloads=1;
}
for (NumberPayloads_Pos=0; NumberPayloads_Pos<NumberPayloads; NumberPayloads_Pos++)
{
Element_Begin1("Payload");
int32u ReplicatedDataLength=0, PayloadLength=0;
int8u StreamNumber;
Get_L1 (StreamNumber, "Stream Number");
Stream_Number=StreamNumber&0x7F; //For KeyFrame
Element_Info1(Stream_Number);
switch (MediaObjectNumberLengthType)
{
case 1 : Skip_L1( "Media Object Number"); break;
case 2 : Skip_L2( "Media Object Number"); break;
case 3 : Skip_L4( "Media Object Number"); break;
default: Trusted_IsNot("Media Object Number"); return; //Problem
}
switch (OffsetIntoMediaObjectLengthType)
{
case 1 : Skip_L1( "Offset Into Media Object"); break;
case 2 : Skip_L2( "Offset Into Media Object"); break;
case 3 : Skip_L4( "Offset Into Media Object"); break;
default: Trusted_IsNot("Offset Into Media Object"); return; //Problem
}
switch (ReplicatedDataLengthType)
{
case 1 : {int8u Data; Get_L1(Data, "Replicated Data Length"); ReplicatedDataLength=Data;} break;
case 2 : {int16u Data; Get_L2(Data, "Replicated Data Length"); ReplicatedDataLength=Data;} break;
case 3 : Get_L4(ReplicatedDataLength, "Replicated Data Length"); break;
default: Trusted_IsNot("Replicated Data Length"); return; //Problem
}
if (ReplicatedDataLengthType!=0 && ReplicatedDataLength>0)
{
if (ReplicatedDataLength>=8)
{
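//First 8 bytes of replicated data are the media object size, then its presentation time (in milliseconds)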
int32u PresentationTime;
Get_L4 (SizeOfMediaObject, "Size Of Media Object");
Get_L4 (PresentationTime, "Presentation Time");
if (ReplicatedDataLength>8)
Data_Packet_ReplicatedData(ReplicatedDataLength-8);
//Presentation time delta
std::map<int16u, stream>::iterator Strea=Stream.find(Stream_Number);
if (Strea!=Stream.end() && Strea->second.StreamKind==Stream_Video)
{
Strea->second.PresentationTimes.insert(PresentationTime);
}
}
else if (ReplicatedDataLength==1)
{
Skip_L1( "Presentation Time Delta");
//TODO
}
else
{
Skip_XX(ReplicatedDataLength, "Replicated Data");
}
}
if (MultiplePayloadsPresent)
{
switch (PayloadLengthType)
{
case 1 : {int8u Data; Get_L1(Data, "Payload Length"); PayloadLength=Data;} break;
case 2 : {int16u Data; Get_L2(Data, "Payload Length"); PayloadLength=Data;} break;
case 3 : Get_L4(PayloadLength, "Payload Length"); break;
default: Trusted_IsNot("Payload Length"); return; //Problem
}
}
else if (Element_Size-Element_Offset>Data_Parse_Padding)
PayloadLength=(int32u)(Element_Size-(Element_Offset+Data_Parse_Padding));
else
{
Trusted_IsNot("Padding size problem");
}
if (Element_Offset+PayloadLength+Data_Parse_Padding>Element_Size)
{
Trusted_IsNot("Payload Length problem");
}
else
{
//Demux
Element_Code=Stream_Number;
Demux(Buffer+(size_t)Element_Offset, (size_t)PayloadLength, ContentType_MainStream);
//Analyzing
stream& StreamItem = Stream[Stream_Number];
if (StreamItem.Parser && StreamItem.SearchingPayload)
{
//Handling of spanned on multiple chunks
#if defined(MEDIAINFO_VC1_YES)
bool FrameIsAlwaysComplete=true;
#endif
if (PayloadLength!=SizeOfMediaObject)
{
if (SizeOfMediaObject_BytesAlreadyParsed==0)
SizeOfMediaObject_BytesAlreadyParsed=SizeOfMediaObject-PayloadLength;
else
SizeOfMediaObject_BytesAlreadyParsed-=PayloadLength;
if (SizeOfMediaObject_BytesAlreadyParsed==0)
Element_Show_Count++;
#if defined(MEDIAINFO_VC1_YES)
else
FrameIsAlwaysComplete=false;
#endif
}
else
Element_Show_Count++;
//Codec specific
#if defined(MEDIAINFO_VC1_YES)
if (Retrieve(StreamItem.StreamKind, StreamItem.StreamPos, Fill_Parameter(StreamItem.StreamKind, Generic_Format))==__T("VC-1"))
((File_Vc1*)StreamItem.Parser)->FrameIsAlwaysComplete=FrameIsAlwaysComplete;
#endif
Open_Buffer_Continue(StreamItem.Parser, (size_t)PayloadLength);
if (StreamItem.Parser->Status[IsFinished]
|| (StreamItem.PresentationTimes.size()>=300 && Config->ParseSpeed<1.0))
{
StreamItem.Parser->Open_Buffer_Unsynch();
StreamItem.SearchingPayload=false;
Streams_Count--;
}
Element_Show();
}
else
{
Skip_XX(PayloadLength, "Data");
if (StreamItem.SearchingPayload
&& (StreamItem.StreamKind==Stream_Video && StreamItem.PresentationTimes.size()>=300))
{
StreamItem.SearchingPayload=false;
Streams_Count--;
}
}
}
Element_End0();
}
<|fim▁hole|>
//Jumping if needed
if (Streams_Count==0 || (Packet_Count>=1000 && Config->ParseSpeed<1.0))
{
Info("Data, Jumping to end of chunk");
GoTo(Data_AfterTheDataChunk, "Windows Media");
}
if (Element_Show_Count>0)
Element_Show();
}
//---------------------------------------------------------------------------
void File_Wm::Data_Packet_ReplicatedData(int32u Size)
{
Element_Begin1("Replicated Data");
int64u Element_Offset_Final=Element_Offset+Size;
stream& StreamItem = Stream[Stream_Number];
for (size_t Pos=0; Pos<StreamItem.Payload_Extension_Systems.size(); Pos++)
{
Element_Begin0();
switch (StreamItem.Payload_Extension_Systems[Pos].ID.hi)
{
case Elements::Payload_Extension_System_TimeStamp : Data_Packet_ReplicatedData_TimeStamp(); break;
default : //Not enough info to validate this algorithm
//if (StreamItem.Payload_Extension_Systems[Pos].Size!=(int16u)-1)
//{
// Element_Name("Unknown");
// Skip_XX(StreamItem.Payload_Extension_Systems[Pos].Size, "Unknown");
//}
//else
Pos=StreamItem.Payload_Extension_Systems.size(); //Disabling the rest, all is unknown
}
Element_End0();
}
if (Element_Offset<Element_Offset_Final)
{
Element_Begin1("Other chunks");
Skip_XX(Element_Offset_Final-Element_Offset, "Unknown");
Element_End0();
}
Element_End0();
}
//---------------------------------------------------------------------------
void File_Wm::Data_Packet_ReplicatedData_TimeStamp()
{
Element_Name("TimeStamp");
//Parsing
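//TS0/TS1 appear to be timestamps in 100-nanosecond units ((int64u)-1 meaning not set), hence the /10000 when displaying milliseconds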
int64u TS0;
Skip_L2( "Unknown");
Skip_L4( "Unknown");
Skip_L4( "Unknown");
Get_L8 (TS0, "TS0");
#if MEDIAINFO_TRACE
if (TS0!=(int64u)-1) Param_Info1(TS0/10000);
#endif //MEDIAINFO_TRACE
Info_L8(TS1, "TS1");
#if MEDIAINFO_TRACE
if (TS1!=(int64u)-1) Param_Info1(TS1/10000);
#endif //MEDIAINFO_TRACE
Skip_L4( "Unknown");
Skip_L4( "Unknown");
Skip_L4( "Unknown");
Skip_L4( "Unknown");
stream& StreamItem = Stream[Stream_Number];
if (StreamItem.TimeCode_First==(int64u)-1 && TS0!=(int64u)-1)
StreamItem.TimeCode_First=TS0/10000;
}
//---------------------------------------------------------------------------
void File_Wm::SimpleIndex()
{
Element_Name("Simple Index");
//Parsing
/*
int32u Count;
Skip_GUID( "File ID");
Skip_L8( "Index Entry Time Interval");
Skip_L4( "Maximum Packet Count");
Get_L4 (Count, "Index Entries Count");
for (int32u Pos=0; Pos<Count; Pos++)
{
Element_Begin1("Index Entry", 6);
int32u PacketNumber;
int16u PacketCount;
Get_L4 (PacketNumber, "Packet Number");
Get_L2 (PacketCount, "Packet Count");
Element_End0();
}
*/
Skip_XX(Element_TotalSize_Get()-Element_Offset, "Indexes");
}
//---------------------------------------------------------------------------
void File_Wm::Index()
{
Element_Name("Index");
//Parsing
/*
int32u Blocks_Count;
int16u Specifiers_Count;
Skip_L4( "Index Entry Time Interval");
Get_L2 (Specifiers_Count, "Index Specifiers Count");
Get_L4 (Blocks_Count, "Index Blocks Count");
for (int16u Pos=0; Pos<Specifiers_Count; Pos++)
{
Element_Begin1("Specifier");
Skip_L2( "Stream Number");
Skip_L2( "Index Type");
Element_End0();
}
for (int32u Pos=0; Pos<Blocks_Count; Pos++)
{
Element_Begin1("Block");
int32u Entry_Count;
Get_L4 (Entry_Count, "Index Entry Count");
Element_Begin1("Block Positions");
for (int16u Pos=0; Pos<Specifiers_Count; Pos++)
Skip_L4( "Position");
Element_End0();
for (int32u Pos=0; Pos<Entry_Count; Pos++)
{
Element_Begin1("Entry");
for (int16u Pos=0; Pos<Specifiers_Count; Pos++)
Skip_L4( "Offset");
Element_End0();
}
Element_End0();
}
*/
Skip_XX(Element_TotalSize_Get()-Element_Offset, "Indexes");
}
//---------------------------------------------------------------------------
void File_Wm::MediaIndex()
{
Element_Name("MediaIndex");
}
//---------------------------------------------------------------------------
void File_Wm::TimecodeIndex()
{
Element_Name("Timecode Index");
//Parsing
int32u TimeCode_First=(int32u)-1;
int32u IndexBlocksCount;
int16u IndexSpecifiersCount;
Skip_L4( "Reserved");
Get_L2 (IndexSpecifiersCount, "Index Specifiers Count");
Get_L4 (IndexBlocksCount, "Index Blocks Count");
Element_Begin1("Index Specifiers");
for (int16u Pos=0; Pos<IndexSpecifiersCount; ++Pos)
{
Element_Begin1("Index Specifier");
Skip_L2( "Stream Number");
Info_L2(IndexType, "Index Type");
Element_Info1(IndexType);
Element_End0();
}
Element_End0();
Element_Begin1("Index Blocks");
for (int16u Pos=0; Pos<IndexBlocksCount; ++Pos)
{
Element_Begin1("Index Block");
int32u IndexEntryCount;
Get_L4 (IndexEntryCount, "Index Entry Count");
Skip_L2( "Timecode Range");
Element_Begin1("Block Positions");
for (int16u Pos=0; Pos<IndexSpecifiersCount; ++Pos)
Skip_L8( "Block Position");
Element_End0();
Element_Begin1("Index Entries");
for (int32u Pos=0; Pos<IndexEntryCount; ++Pos)
{
Element_Begin1("Index Entry");
if (TimeCode_First==(int32u)-1)
Get_L4 (TimeCode_First, "Timecode");
else
Skip_L4( "Timecode");
for (int16u Pos=0; Pos<IndexSpecifiersCount; ++Pos)
Skip_L4( "Offsets");
Element_End0();
}
Element_End0();
Element_End0();
}
Element_End0();
FILLING_BEGIN();
Stream_Prepare(Stream_Other);
Fill(Stream_Other, StreamPos_Last, Other_Type, "Time code");
Fill(Stream_Other, StreamPos_Last, Other_Format, "WM TC");
if (TimeCode_First!=(int32u)-1)
{
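//The first timecode is packed as 8 BCD digits (HH MM SS FF), one per nibble; non-decimal digits are rejected below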
int8u H1= TimeCode_First>>28;
int8u H2=(TimeCode_First>>24)&0xF;
int8u M1=(TimeCode_First>>20)&0xF;
int8u M2=(TimeCode_First>>16)&0xF;
int8u S1=(TimeCode_First>>12)&0xF;
int8u S2=(TimeCode_First>> 8)&0xF;
int8u F1=(TimeCode_First>> 4)&0xF;
int8u F2= TimeCode_First &0xF;
if (H1<10 && H2<10 && M1<10 && M2<10 && S1<10 && S2<10 && F1<10 && F2<10)
{
string TC;
TC+='0'+H1;
TC+='0'+H2;
TC+=':';
TC+='0'+M1;
TC+='0'+M2;
TC+=':';
TC+='0'+S1;
TC+='0'+S2;
TC+=':';
TC+='0'+F1;
TC+='0'+F2;
Fill(Stream_Other, StreamPos_Last, Other_TimeCode_FirstFrame, TC.c_str());
}
}
FILLING_END();
}
//***************************************************************************
// C++
//***************************************************************************
} //NameSpace
#endif //MEDIAINFO_WM_YES<|fim▁end|>
|
if (Data_Parse_Padding)
Skip_XX(Data_Parse_Padding, "Padding");
|
<|file_name|>pr-list.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>/// <reference path="./typings/app.d.ts" />
import {Component, View, ElementRef, bootstrap, FORM_DIRECTIVES} from 'angular2/angular2';
@Component({
selector: 'pr-list'
})
@View({
directives: [FORM_DIRECTIVES],
template: `
List of Pull Requests
`
})
export class PrList {
}<|fim▁end|>
| |
<|file_name|>test_lazy.py<|end_file_name|><|fim▁begin|>import functools
import re
import pytest
from autoray import do, lazy, to_numpy, infer_backend, get_dtype_name, astype
from numpy.testing import assert_allclose
from .test_autoray import BACKENDS, gen_rand<|fim▁hole|>
def test_manual_construct():
def foo(a, b, c):
a1, a2 = a
b1 = b['1']
c1, c2 = c['sub']
return do('sum', do('stack', (a1, a2, b1, c1, c2)), axis=0)
x = do('random.uniform', size=(5, 7), like='numpy')
x0 = lazy.array(x[0, :])
x1 = lazy.array(x[1, :])
x2 = lazy.array(x[2, :])
x3 = lazy.array(x[3, :])
x4 = lazy.array(x[4, :])
y = lazy.LazyArray(
backend=infer_backend(x),
fn=foo,
args=((x0, x1), {'1': x2}),
kwargs=dict(c={'sub': (x3, x4)}),
shape=(7,),
dtype='float64',
)
assert y.deps == (x0, x1, x2, x3, x4)
assert re.match(
r'x\d+ = foo\d+\(\(x\d+, x\d+,\), '
r'{1: x\d+}, c: {sub: \(x\d+, x\d+,\)}\)',
y.get_source()
)
assert_allclose(y.compute(), x.sum(0))
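# Helper: orthonormalize the rows of X with modified Gram-Schmidt, written purely
# with autoray's `do` dispatch so it works on real arrays and LazyArrays alike.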
def modified_gram_schmidt(X):
Q = []
for j in range(0, X.shape[0]):
q = X[j, :]
for i in range(0, j):
rij = do("tensordot", do("conj", Q[i]), q, axes=1)
q = q - rij * Q[i]
rjj = do("linalg.norm", q, 2)
Q.append(q / rjj)
return do("stack", tuple(Q), axis=0)
def wrap_strict_check(larray):
fn_orig = larray._fn
@functools.wraps(fn_orig)
def checked(*args, **kwargs):
data = fn_orig(*args, **kwargs)
assert tuple(data.shape) == larray.shape
assert get_dtype_name(data) == larray.dtype
assert infer_backend(data) == larray.backend
return data
return checked
def make_strict(larray):
for node in larray:
larray._fn = wrap_strict_check(larray)
@pytest.mark.parametrize("backend", BACKENDS)
def test_lazy_mgs(backend):
if backend == "sparse":
pytest.xfail("Sparse doesn't support 'linalg.norm' yet...")
x = gen_rand((5, 5), backend)
lx = lazy.array(x)
ly = modified_gram_schmidt(lx)
make_strict(ly)
assert str(ly) == (
f"<LazyArray(fn=stack, shape=(5, 5), "
f"dtype=float64, backend='{backend}')>"
)
assert isinstance(ly, lazy.LazyArray)
hmax = ly.history_max_size()
hpeak = ly.history_peak_size()
htot = ly.history_total_size()
assert hmax == 25
assert 25 < hpeak < htot
assert len(tuple(ly)) == 57
assert len({node.fn_name for node in ly}) == 9
assert_allclose(to_numpy(ly.compute()), to_numpy(modified_gram_schmidt(x)))
with lazy.shared_intermediates():
ly = modified_gram_schmidt(lx)
make_strict(ly)
assert len(tuple(ly)) == 51
assert len({node.fn_name for node in ly}) == 9
assert_allclose(to_numpy(ly.compute()), to_numpy(modified_gram_schmidt(x)))
def test_partial_evaluation():
la = lazy.array(gen_rand((10, 10), "numpy"))
lb = lazy.array(gen_rand((10, 10), "numpy"))
lc = lazy.array(gen_rand((10, 10), "numpy"))
ld = lazy.array(gen_rand((10, 10), "numpy"))
lab = do("tanh", la @ lb)
lcd = lc @ ld
ls = lab + lcd
ld = do("abs", lab / lcd)
le = do("einsum", "ab,ba->a", ls, ld)
lf = do("sum", le)
make_strict(lf)
assert len(tuple(lf)) == 12
lf.compute_constants(variables=[lc, ld]) # constants = [la, lb]
assert len(tuple(lf)) == 9
assert "tanh" not in {node.fn_name for node in lf}
lf.compute()
def test_plot():
import matplotlib
matplotlib.use("Template")
la = lazy.array(gen_rand((10, 10), "numpy"))
lb = lazy.array(gen_rand((10, 10), "numpy"))
lc = lazy.array(gen_rand((10, 10), "numpy"))
ld = lazy.array(gen_rand((10, 10), "numpy"))
lab = do("tanh", la @ lb)
lcd = lc @ ld
ls = lab + lcd
ld = do("abs", lab / lcd)
le = do("einsum", "ab,ba->a", ls, ld)
lf = do("sum", le)
lf.plot()
lf.plot(variables=[lc, ld])
lf.plot_history_size_footprint()
def test_share_intermediates():
la = lazy.array(gen_rand((10, 10), "numpy"))
lb = lazy.array(gen_rand((10, 10), "numpy"))
l1 = do("tanh", la @ lb)
l2 = do("tanh", la @ lb)
ly = l1 + l2
assert len(tuple(ly)) == 7
y1 = ly.compute()
with lazy.shared_intermediates():
l1 = do("tanh", la @ lb)
l2 = do("tanh", la @ lb)
ly = l1 + l2
assert len(tuple(ly)) == 5
y2 = ly.compute()
assert_allclose(y1, y2)
@pytest.mark.parametrize("backend", BACKENDS)
def test_transpose_chain(backend):
lx = lazy.array(gen_rand((2, 3, 4, 5, 6), backend))
l1 = do("transpose", lx, (1, 0, 3, 2, 4))
l2 = do("transpose", l1, (1, 0, 3, 2, 4))
assert l2.args[0] is lx
assert l2.deps == (lx,)
assert len(tuple(l1)) == 2
assert len(tuple(l2)) == 2
assert_allclose(
to_numpy(lx.compute()), to_numpy(l2.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
def test_reshape_chain(backend):
lx = lazy.array(gen_rand((2, 3, 4, 5, 6), backend))
l1 = do("reshape", lx, (6, 4, 30))
l2 = do("reshape", l1, (-1,))
assert len(tuple(l1)) == 2
assert len(tuple(l2)) == 2
assert l2.args[0] is lx
assert l2.deps == (lx,)
assert_allclose(
to_numpy(lx.compute()).flatten(), to_numpy(l2.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("dtype", ["float64", "complex128"])
def test_svd(backend, dtype):
if backend == "sparse":
pytest.xfail("Sparse doesn't support 'linalg.svd' yet...")
x = lazy.array(gen_rand((4, 5), backend, dtype))
U, s, VH = do("linalg.svd", x)
assert U.shape == (4, 4)
assert s.shape == (4,)
assert VH.shape == (4, 5)
s = astype(s, dtype)
ly = U @ (do("reshape", s, (-1, 1)) * VH)
make_strict(ly)
assert_allclose(
to_numpy(x.compute()), to_numpy(ly.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
def test_qr(backend):
if backend == "sparse":
pytest.xfail("Sparse doesn't support 'linalg.qr' yet...")
x = lazy.array(gen_rand((4, 5), backend))
Q, R = do("linalg.qr", x)
assert Q.shape == (4, 4)
assert R.shape == (4, 5)
ly = Q @ R
make_strict(ly)
assert_allclose(
to_numpy(x.compute()), to_numpy(ly.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("dtype", ["float64", "complex128"])
def test_eig_inv(backend, dtype):
if backend in ("cupy", "dask", "torch", "mars", "sparse"):
pytest.xfail(f"{backend} doesn't support 'linalg.eig' yet...")
# N.B. the prob that a real gaussian matrix has all real eigenvalues is
# ``2**(-d * (d - 1) / 4)`` - see Edelman 1997 - so need ``d >> 5``
d = 20
x = lazy.array(gen_rand((d, d), backend, dtype))
el, ev = do("linalg.eig", x)
assert el.shape == (d,)
assert ev.shape == (d, d)
ly = ev @ (do("reshape", el, (-1, 1)) * do("linalg.inv", ev))
make_strict(ly)
assert_allclose(
to_numpy(x.compute()), to_numpy(ly.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("dtype", ["float64", "complex128"])
def test_eigh(backend, dtype):
if backend in ("dask", "mars", "sparse",):
pytest.xfail(f"{backend} doesn't support 'linalg.eig' yet...")
x = lazy.array(gen_rand((5, 5), backend, dtype))
x = x + x.H
el, ev = do("linalg.eigh", x)
assert get_dtype_name(ev) == dtype
assert el.shape == (5,)
assert ev.shape == (5, 5)
ly = ev @ (do("reshape", el, (-1, 1)) * ev.H)
make_strict(ly)
assert_allclose(
to_numpy(x.compute()), to_numpy(ly.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("dtype", ["float64", "complex128"])
def test_cholesky(backend, dtype):
if backend in ("sparse",):
pytest.xfail(f"{backend} doesn't support 'linalg.cholesky' yet...")
x = lazy.array(gen_rand((5, 5), backend, dtype))
x = x @ x.H
C = do("linalg.cholesky", x)
assert C.shape == (5, 5)
ly = C @ C.H
make_strict(ly)
assert_allclose(
to_numpy(x.compute()), to_numpy(ly.compute()),
)
@pytest.mark.parametrize("backend", BACKENDS)
@pytest.mark.parametrize("dtype", ["float64", "complex128"])
def test_solve(backend, dtype):
if backend in ("sparse",):
pytest.xfail(f"{backend} doesn't support 'linalg.solve' yet...")
A = lazy.array(gen_rand((5, 5), backend, dtype))
y = lazy.array(gen_rand((5,), backend, dtype))
x = do("linalg.solve", A, y)
assert x.shape == (5,)
# tensorflow e.g. doesn't allow ``A @ x`` for vector x ...
ly = do("tensordot", A, x, axes=1)
make_strict(ly)
assert_allclose(
to_numpy(y.compute()), to_numpy(ly.compute()),
)
def test_dunder_magic():
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x * a
b = x * b
a = a * y
b = b * y
a *= z
b *= z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x + a
b = x + b
a = a + y
b = b + y
a += z
b += z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x - a
b = x - b
a = a - y
b = b - y
a -= z
b -= z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x / a
b = x / b
a = a / y
b = b / y
a /= z
b /= z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x // a
b = x // b
a = a // y
b = b // y
a //= z
b //= z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3), like='numpy')
a = x ** a
b = x ** b
a = a ** y
b = b ** y
a **= z
b **= z
assert_allclose(a, b.compute())
a = do('random.uniform', size=(3, 3), like='numpy')
b = lazy.array(a)
x, y, z = do('random.uniform', size=(3, 3, 3), like='numpy')
a = x @ a
b = x @ b
a = a @ y
b = b @ y
a = a @ z
b @= z
assert_allclose(a, b.compute())
def test_indexing():
a = do('random.uniform', size=(2, 3, 4, 5), like='numpy')
b = lazy.array(a)
for key in [
0,
(1, ..., -1),
(0, 1, slice(None), -2)
]:
assert_allclose(a[key], b[key].compute())
def test_einsum():
a = do('random.uniform', size=(2, 3, 4, 5), like='numpy')
b = do('random.uniform', size=(4, 5), like='numpy')
c = do('random.uniform', size=(6, 2, 3), like='numpy')
eq = 'abcd,cd,fab->fd'
x1 = do('einsum', eq, a, b, c)
la, lb, lc = map(lazy.array, (a, b, c))
x2 = do('einsum', eq, la, lb, lc)
assert_allclose(x1, x2.compute())
def test_tensordot():
a = do('random.uniform', size=(7, 3, 4, 5), like='numpy')
b = do('random.uniform', size=(5, 6, 3, 2), like='numpy')
x1 = do('tensordot', a, b, axes=[(1, 3), (2, 0)])
la, lb = map(lazy.array, (a, b))
x2 = do('tensordot', la, lb, axes=[(1, 3), (2, 0)])
assert_allclose(x1, x2.compute())
def test_use_variable_to_trace_function():
a = lazy.Variable(shape=(2, 3), backend='numpy')
b = lazy.Variable(shape=(3, 4), backend='numpy')
c = do('tanh', a @ b)
f = c.get_function([a, b])
x = do('random.uniform', size=(2, 3), like='numpy')
y = do('random.uniform', size=(3, 4), like='numpy')
z = f([x, y])
assert z.shape == (2, 4)<|fim▁end|>
| |
<|file_name|>cogroup.rs<|end_file_name|><|fim▁begin|>//! Group records by a key, and apply a reduction function.
//!
//! The `group` operators act on data that can be viewed as pairs `(key, val)`. They group records
//! with the same key, and apply user supplied functions to the key and a list of values, which are
//! expected to populate a list of output values.
//!
//! Several variants of `group` exist which allow more precise control over how grouping is done.
//! For example, the `_by` suffixed variants take arbitrary data, but require a key-value selector
//! to be applied to each record. The `_u` suffixed variants use unsigned integers as keys, and
//! will use a dense array rather than a `HashMap` to store their keys.
//!
//! The list of values are presented as an iterator which internally merges sorted lists of values.
//! This ordering can be exploited in several cases to avoid computation when only the first few
//! elements are required.
//!
//! #Examples
//!
//! This example groups a stream of `(key,val)` pairs by `key`, and yields only the most frequently
//! occurring value for each key.
//!
//! ```ignore
//! stream.group(|key, vals, output| {
//! let (mut max_val, mut max_wgt) = vals.peek().unwrap();
//! for (val, wgt) in vals {
//! if wgt > max_wgt {
//! max_wgt = wgt;
//! max_val = val;
//! }
//! }
//! output.push((max_val.clone(), max_wgt));
//! })
//! ```
use std::rc::Rc;
use std::default::Default;
use std::hash::Hasher;
use std::ops::DerefMut;
use itertools::Itertools;
use ::{Collection, Data};
use timely::dataflow::*;
use timely::dataflow::operators::{Map, Binary};
use timely::dataflow::channels::pact::Exchange;
use timely_sort::{LSBRadixSorter, Unsigned};
use collection::{LeastUpperBound, Lookup, Trace, Offset};
use collection::trace::{CollectionIterator, DifferenceIterator, Traceable};
use iterators::coalesce::Coalesce;
use collection::compact::Compact;
/// Extension trait for the `group_by` and `group_by_u` differential dataflow methods.
pub trait CoGroupBy<G: Scope, K: Data, V1: Data> where G::Timestamp: LeastUpperBound {
/// A primitive binary version of `group_by`, which acts on a `Collection<G, (K, V1)>` and a `Collection<G, (K, V2)>`.
///
/// The two streams must already be key-value pairs, which is too bad. Also, in addition to the
/// normal arguments (another stream, a hash for the key, a reduction function, and per-key logic),
/// the user must specify a function implementing `Fn(u64) -> Look`, where `Look: Lookup<K, Offset>` is something you shouldn't have to know about yet.
/// The right thing to use here, for the moment, is `|_| HashMap::new()`.
///
/// There are better options if you know your key is an unsigned integer, namely `|x| (Vec::new(), x)`.
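///
/// #Examples
///
/// A rough sketch of a call (not compiled here): `xs` and `ys` are collections of
/// `(key, value)` pairs, `hash` stands in for whatever `Fn(&K) -> u64` you route keys by,
/// and the output records each key's total weight across both inputs.
///
/// ```ignore
/// let totals = xs.cogroup_by_inner(
///     &ys,
///     |key| hash(key),            // key routing function (illustrative)
///     |_key, total| *total,       // turn each produced value into an output record
///     |_| HashMap::new(),         // generic key lookup, as suggested above
///     |_key, vals1, vals2, output| {
///         let mut total = 0;
///         for (_, wgt) in vals1 { total += wgt; }
///         for (_, wgt) in vals2 { total += wgt; }
///         output.push((total, 1));
///     });
/// ```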
fn cogroup_by_inner<
D: Data,
V2: Data+Default,
V3: Data+Default,
U: Unsigned+Default,
KH: Fn(&K)->U+'static,
Look: Lookup<K, Offset>+'static,
LookG: Fn(u64)->Look,
Logic: Fn(&K, &mut CollectionIterator<DifferenceIterator<V1>>, &mut CollectionIterator<DifferenceIterator<V2>>, &mut Vec<(V3, i32)>)+'static,
Reduc: Fn(&K, &V3)->D+'static,
>
(&self, other: &Collection<G, (K, V2)>, key_h: KH, reduc: Reduc, look: LookG, logic: Logic) -> Collection<G, D>;
}
impl<G: Scope, K: Data, V1: Data> CoGroupBy<G, K, V1> for Collection<G, (K, V1)>
where G::Timestamp: LeastUpperBound {
fn cogroup_by_inner<
D: Data,
V2: Data+Default,
V3: Data+Default,
U: Unsigned+Default,
KH: Fn(&K)->U+'static,
Look: Lookup<K, Offset>+'static,
LookG: Fn(u64)->Look,
Logic: Fn(&K, &mut CollectionIterator<DifferenceIterator<V1>>, &mut CollectionIterator<DifferenceIterator<V2>>, &mut Vec<(V3, i32)>)+'static,
Reduc: Fn(&K, &V3)->D+'static,
>
(&self, other: &Collection<G, (K, V2)>, key_h: KH, reduc: Reduc, look: LookG, logic: Logic) -> Collection<G, D> {
let mut source1 = Trace::new(look(0));
let mut source2 = Trace::new(look(0));
let mut result = Trace::new(look(0));
// A map from times to received (key, val, wgt) triples.
let mut inputs1 = Vec::new();
let mut inputs2 = Vec::new();
// A map from times to a list of keys that need processing at that time.
let mut to_do = Vec::new();
// temporary storage for operator implementations to populate
let mut buffer = vec![];
let key_h = Rc::new(key_h);
let key_1 = key_h.clone();
let key_2 = key_h.clone();
// create an exchange channel based on the supplied Fn(&D1)->u64.
let exch1 = Exchange::new(move |&((ref k, _),_)| key_1(k).as_u64());
let exch2 = Exchange::new(move |&((ref k, _),_)| key_2(k).as_u64());
let mut sorter1 = LSBRadixSorter::new();
let mut sorter2 = LSBRadixSorter::new();
// fabricate a data-parallel operator using the `unary_notify` pattern.
Collection::new(self.inner.binary_notify(&other.inner, exch1, exch2, "CoGroupBy", vec![], move |input1, input2, output, notificator| {
// 1. read each input, and stash it in our staging area
while let Some((time, data)) = input1.next() {
inputs1.entry_or_insert(time.time(), || Vec::new())
.push(::std::mem::replace(data.deref_mut(), Vec::new()));
notificator.notify_at(time);
}
// 1. read each input, and stash it in our staging area
while let Some((time, data)) = input2.next() {
inputs2.entry_or_insert(time.time(), || Vec::new())
.push(::std::mem::replace(data.deref_mut(), Vec::new()));
notificator.notify_at(time);
}
// 2. go through each time of interest that has reached completion
// times are interesting either because we received data, or because we conclude
// in the processing of a time that a future time will be interesting.
while let Some((index, _count)) = notificator.next() {
let mut stash = Vec::new();
panic!("interesting times needs to do LUB of union of times for each key, input");
// 2a. fetch any data associated with this time.
if let Some(mut queue) = inputs1.remove_key(&index) {
// sort things; radix if many, .sort_by if few.
let compact = if queue.len() > 1 {
for element in queue.into_iter() {
sorter1.extend(element.into_iter(), &|x| key_h(&(x.0).0));
}
let mut sorted = sorter1.finish(&|x| key_h(&(x.0).0));
let result = Compact::from_radix(&mut sorted, &|k| key_h(k));
sorted.truncate(256);
sorter1.recycle(sorted);
result
}
else {
let mut vec = queue.pop().unwrap();
let mut vec = vec.drain(..).collect::<Vec<_>>();
vec.sort_by(|x,y| key_h(&(x.0).0).cmp(&key_h(&(y.0).0)));
Compact::from_radix(&mut vec![vec], &|k| key_h(k))
};
if let Some(compact) = compact {
for key in &compact.keys {
stash.push(index.clone());
source1.interesting_times(key, &index, &mut stash);
for time in &stash {
let mut queue = to_do.entry_or_insert((*time).clone(), || { notificator.notify_at(index.delayed(time)); Vec::new() });
queue.push((*key).clone());
}
stash.clear();
}
source1.set_difference(index.time(), compact);
}
}
// 2a. fetch any data associated with this time.
if let Some(mut queue) = inputs2.remove_key(&index) {
// sort things; radix if many, .sort_by if few.
let compact = if queue.len() > 1 {
for element in queue.into_iter() {
sorter2.extend(element.into_iter(), &|x| key_h(&(x.0).0));
}
let mut sorted = sorter2.finish(&|x| key_h(&(x.0).0));
let result = Compact::from_radix(&mut sorted, &|k| key_h(k));
sorted.truncate(256);
sorter2.recycle(sorted);
result
}
else {
let mut vec = queue.pop().unwrap();
let mut vec = vec.drain(..).collect::<Vec<_>>();
vec.sort_by(|x,y| key_h(&(x.0).0).cmp(&key_h((&(y.0).0))));
Compact::from_radix(&mut vec![vec], &|k| key_h(k))
};
if let Some(compact) = compact {
for key in &compact.keys {
stash.push(index.clone());
source2.interesting_times(key, &index, &mut stash);
for time in &stash {
let mut queue = to_do.entry_or_insert((*time).clone(), || { notificator.notify_at(index.delayed(time)); Vec::new() });
queue.push((*key).clone());
}
stash.clear();
}
source2.set_difference(index.time(), compact);
}
}
// we may need to produce output at index
let mut session = output.session(&index);
// 2b. We must now determine for each interesting key at this time, how does the
// currently reported output match up with what we need as output. Should we send
// more output differences, and what are they?
// Much of this logic used to hide in `OperatorTrace` and `CollectionTrace`.
// They are now gone and simpler, respectively.
if let Some(mut keys) = to_do.remove_key(&index) {
// we would like these keys in a particular order.
// TODO : use a radix sort since we have `key_h`.
keys.sort_by(|x,y| (key_h(&x), x).cmp(&(key_h(&y), y)));
keys.dedup();
// accumulations for installation into result
let mut accumulation = Compact::new(0,0);
for key in keys {
// acquire an iterator over the collection at `time`.
let mut input1 = source1.get_collection(&key, &index);
let mut input2 = source2.get_collection(&key, &index);
// if we have some data, invoke logic to populate self.dst
if input1.peek().is_some() || input2.peek().is_some() { logic(&key, &mut input1, &mut input2, &mut buffer); }
buffer.sort_by(|x,y| x.0.cmp(&y.0));
// push differences in to Compact.
let mut compact = accumulation.session();
for (val, wgt) in Coalesce::coalesce(result.get_collection(&key, &index)
.map(|(v, w)| (v,-w))
.merge_by(buffer.iter().map(|&(ref v, w)| (v, w)), |x,y| {
x.0 <= y.0
}))
{
session.give((reduc(&key, val), wgt));
compact.push(val.clone(), wgt);
}
compact.done(key);
buffer.clear();
}
if accumulation.vals.len() > 0 {
// println!("group2");<|fim▁hole|> }))
}
}<|fim▁end|>
|
result.set_difference(index.time(), accumulation);
}
}
}
|
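Editor's note on the cogroup row above: the operator emits only changes in its output for each key, by negating the weights it previously reported, merging in the freshly computed (value, weight) pairs, and coalescing so that unchanged values cancel. A minimal sketch of that idea in Python (illustrative only; the names and types below are assumptions, not part of the dataset row):

from collections import defaultdict

def output_differences(previous, current):
    # Retract what was reported before (negative weights) and assert the new
    # output (positive weights); whatever does not cancel is the update to emit.
    totals = defaultdict(int)
    for value, weight in previous:
        totals[value] -= weight
    for value, weight in current:
        totals[value] += weight
    return sorted((value, weight) for value, weight in totals.items() if weight != 0)

# "b" is unchanged and cancels; only the retraction of "a" and the addition of "c" remain.
print(output_differences([("a", 1), ("b", 2)], [("b", 2), ("c", 1)]))  # [('a', -1), ('c', 1)]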
<|file_name|>VersionIndependentConcept.java<|end_file_name|><|fim▁begin|>package ca.uhn.fhir.jpa.term;
/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2016 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and<|fim▁hole|>public class VersionIndependentConcept {
private String mySystem;
private String myCode;
public VersionIndependentConcept(String theSystem, String theCode) {
setSystem(theSystem);
setCode(theCode);
}
public String getSystem() {
return mySystem;
}
public void setSystem(String theSystem) {
mySystem = theSystem;
}
public String getCode() {
return myCode;
}
public void setCode(String theCode) {
myCode = theCode;
}
}<|fim▁end|>
|
* limitations under the License.
* #L%
*/
|
<|file_name|>crawler_wikipedia.py<|end_file_name|><|fim▁begin|>#encoding=utf-8
#------------------------------------------------------------------
# File Name: crawler_wikipedia_v0.py
# Author: yy
# Mail: [email protected]
# Date: 2014年02月12日 星期三 15时15分24秒
#-------------------------------------------------------------------
import time
import sys
import string
import urllib2
import re
import types
from bs4 import BeautifulSoup
import xml.etree.cElementTree as ET
class crawler_wikipedia:
# the start url:
startUrl = u''
# the prefix of wikipedia api string
apiPrefix = u'http://zh.wikipedia.org/w/api.php?action=query&prop=extracts&exintro&pageids='
    # the suffix of wikipedia api string
apiSurfix = u'&format=xml'
# the name of mean file
MeanFileName = r'wiki.txt'
# the name of error pageids list file
ErrorListFileName = r'wiki_error.txt'
#------------------------------------------------------------
# function: get_content_helper(self,apistr)
# description: deduce the page type by content string
#
# parameter:
# self:
# apistr: string.
#
# return:
# string
#------------------------------------------------------------
def get_content_helper(self,apistr):
        return u'test the function.'
#------------------------------------------------------------
# function: get_content_by_api(self,apistr)
# description: get content by wikipedia api
#
# parameter:
# self:
# apistr: string.
#
# return:
# string
#------------------------------------------------------------<|fim▁hole|> pagecontent = urllib2.urlopen(apistr).read()
bs = BeautifulSoup(str(pagecontent))
content = bs.find('page')
if None == content:
print apistr + u' is empty!!'
return None
else:
flag_title = False
for attribute in content.attrs:
if attribute == u'title':
flag_title = True
if flag_title:
print apistr + u' has content!!'
contentStr = self.get_content_helper(apistr)
return contentStr
else:
return None
#------------------------------------------------------------
#
#------------------------------------------------------------
def main(self):
#change the default code type of sys
reload(sys)
sys.setdefaultencoding('utf-8')
#init the pageid
        count = 121213  # an existing page id
#get the handle of output file
outputfile = open(self.__class__.MeanFileName,'a+')
#write the working time into file
beginStr = 'begin time:\n' + time.asctime() + u'\n'
outputfile.write(beginStr)
#while(count < 2):
# #generate url
# countstr = str(count)
# currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix
# #test if have an exception
# req = urllib2.Request(currentApiStr)
# try:
# urllib2.urlopen(req)
# except urllib2.URLError,e:
# count += 1
# print e.reason
# continue
# #get content by apistr
# content = self.get_content_by_api(currentApiStr)
# print currentApiStr
# print u' '
# print content
# print u'-----------------------------------------------------'
# count += 1
# print count
countstr = str(count)
currentApiStr = self.__class__.apiPrefix + countstr + self.__class__.apiSurfix
content = self.get_content_by_api(currentApiStr)
print content
endStr = 'end time:\n' + time.asctime() + u'\n'
outputfile.write(endStr)
print currentApiStr
print u'the main function is finished!!'
outputfile.close()
#----------------------------------------------------------------
#
# program entrance
#
#----------------------------------------------------------------
print """
-----------------------------------------------------------------
a crawler on wikipedia
-----------------------------------------------------------------
content is in file:
wiki.txt
the program is working......
"""
mycrawler = crawler_wikipedia()
mycrawler.main()<|fim▁end|>
|
def get_content_by_api(self,apistr):
|
<|file_name|>EditHistoryFilterTagsToExcludeActions.js<|end_file_name|><|fim▁begin|>import {
ADD_HISTORY_FILTER_EXCLUDE_TAG,<|fim▁hole|> DISMISS_HISTORY_FILTER_EXCLUDES_TAGS,
} from 'app/configs+constants/ActionTypes';
export const saveTags = (tags = []) => ({
type: SAVE_HISTORY_FILTER_EXCLUDES_TAGS,
tags: tags,
});
export const dismissTags = () => ({ type: DISMISS_HISTORY_FILTER_EXCLUDES_TAGS });
export const cancelTags = () => ({ type: DISMISS_HISTORY_FILTER_EXCLUDES_TAGS });
export const tappedPill = () => ({ type: REMOVE_HISTORY_FILTER_EXCLUDE_TAG });
export const addPill = () => ({ type: ADD_HISTORY_FILTER_EXCLUDE_TAG });<|fim▁end|>
|
REMOVE_HISTORY_FILTER_EXCLUDE_TAG,
SAVE_HISTORY_FILTER_EXCLUDES_TAGS,
|
<|file_name|>IRC2LCD.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python
#
# IRC2LCD
# Tim Ballas
#
"""IRC bot to display mentions on an LCD through a Parallax Propeller.
Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>
"""
#
# Modified from:
# Example program using irc.bot.
# Joel Rosdahl <[email protected]>
#
import irc.bot
import irc.strings
from irc.client import ip_numstr_to_quad, ip_quad_to_numstr
import re
import serial
import time
class IRC2LCDbot(irc.bot.SingleServerIRCBot):
def __init__(self, channel, nickname, server, port=6667):
irc.bot.SingleServerIRCBot.__init__(self, [(server, port)], nickname, nickname)
self.channel = channel
def on_nicknameinuse(self, c, e):
c.nick(BotNick)
def on_welcome(self, c, e):
c.join(self.channel)
def on_pubmsg(self, c, e):
pubmsgTemp = e.arguments[0] # e.arguments[0] is the public message we are processing, loaded into "pubmsgTemp"
pattern = re.compile(r'(.*{0}([|_][a-z0-9]+)?(\s|$).*|.*{1}([|_][a-z0-9]+)?:.*)'.format(MonitorNick,MonitorNick)) # Compile Regular Expression to check if the public message has our MonitorNick in it
result = re.search(pattern, pubmsgTemp) # Execute Regular Expression
if result: # Check to see if we matched our MonitorNick in the public message
try: # Handle error when result has 'None' in it
print result.group(1) # Print matched message to the console
MatchedMessage = str(result.group(1)) # Load matched message into "MatchedMessage" variable. Enclosing it in "str()" is to return a nice printable string.
ser.write("\r\t" + MatchedMessage) # Write "MatchedMessage" to LCD through Parallax Propeller over Serial connection. "\r\t" is command for Propeller to Clear LCD.
except: # Needed to complete 'try:' statement
pass # Do nothing and move on
def main():<|fim▁hole|>
s = sys.argv[1].split(":", 1)
server = s[0]
if len(s) == 2:
try:
port = int(s[1])
except ValueError:
print("Error: Erroneous port.")
sys.exit(1)
else:
port = 6667
channel = sys.argv[2]
nickname = sys.argv[3]
COMport = sys.argv[4]
global BotNick # Declare global variable for "BotNick"
if len(sys.argv) == 6: # If there is a argument defined for "BotNick"
BotNick = sys.argv[5] # Set "BotNick" to Argument 5(sys.argv[5])
else: # Else
BotNick = nickname + "_" # Use nickname to monitor and an underscore
global MonitorNick # Declare global variable for "MonitorNick"
MonitorNick = nickname # Set "MonitorNick" to nickname(sys.argv[3])
global ser # Declare global variable for "ser"
ser = serial.Serial(str(COMport),baudrate=9600) # Set "ser" to Serial object
bot = IRC2LCDbot(channel, nickname, server, port) # Set "bot" to IRC2LCDbot object
bot.start() # Start bot
ser.close() # Closing Serial port will prevent problems
if __name__ == "__main__":
main()<|fim▁end|>
|
import sys
if len(sys.argv) < 5:
print("Usage: IRCbot2LCD.py <server[:port]> <channel> <nicknameToMonitor> <COMport> <optional bot nickname>")
sys.exit(1)
|
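Editor's note on the IRC2LCD row above: the mention check builds a regular expression around the monitored nickname, allowing an optional "|suffix" or "_suffix" tail and the "nick: message" address form. A small standalone sketch of that matching behaviour (assumed nick and messages, not from the dataset row; re.escape is added here for safety):

import re

monitor_nick = "alice"  # hypothetical nick; the real one comes from the command line
pattern = re.compile(
    r'(.*{0}([|_][a-z0-9]+)?(\s|$).*|.*{0}([|_][a-z0-9]+)?:.*)'.format(re.escape(monitor_nick))
)

for message in ("alice: lunch?", "ping alice|away please", "nothing relevant here"):
    print(message, "->", bool(pattern.search(message)))  # True, True, False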
<|file_name|>test_unit_tilde.py<|end_file_name|><|fim▁begin|>import pytest
from conftest import assert_bash_exec
@pytest.mark.bashcomp(cmd=None, ignore_env=r"^\+COMPREPLY=")
class TestUnitTilde:
def test_1(self, bash):
assert_bash_exec(bash, "_tilde >/dev/null")
def test_2(self, bash):
"""Test environment non-pollution, detected at teardown."""
assert_bash_exec(
bash, 'foo() { local aa="~"; _tilde "$aa"; }; foo; unset foo'
)
def test_3(self, bash):<|fim▁hole|> def _test_part_full(self, bash, part, full):
res = (
assert_bash_exec(
bash,
'_tilde "~%s"; echo "${COMPREPLY[@]}"' % part,
want_output=True,
)
.strip()
.split()
)
assert res
assert res[0] == "~%s" % full
def test_4(self, bash, part_full_user):
"""~full should complete to ~full unmodified."""
_, full = part_full_user
self._test_part_full(bash, full, full)
def test_5(self, bash, part_full_user):
"""~part should complete to ~full."""
part, full = part_full_user
self._test_part_full(bash, part, full)<|fim▁end|>
|
"""Test for https://bugs.debian.org/766163"""
assert_bash_exec(bash, "_tilde ~-o")
|
<|file_name|>test_wind_dir.py<|end_file_name|><|fim▁begin|># Simple example of reading the MCP3008 analog input channels and printing
import time
import sys
import numpy as np
# Import SPI library (for hardware SPI) and MCP3008 library.
import Adafruit_GPIO.SPI as SPI
import Adafruit_MCP3008
import RPi.GPIO as GPIO
import spidev
# Software SPI configuration:
#CLK = 18<|fim▁hole|>#MISO = 23
#MOSI = 24
#CS = 25
#mcp = Adafruit_MCP3008.MCP3008(clk=CLK, cs=CS, miso=MISO, mosi=MOSI)
# Hardware SPI configuration:
SPI_PORT = 0
SPI_DEVICE = 0
mcp = Adafruit_MCP3008.MCP3008(spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE))
# Choose channel
an_chan = 3 # channel 8 (numbered 0-7)
# choose GPIO pin
ledPin = 18
GPIO.setmode(GPIO.BCM)
GPIO.setup(ledPin,GPIO.OUT)
samplingTime = 280.0
deltaTime = 40.0
sleepTime = 9680.0
directions = {'N':3.84,'NNE':1.98,'NE':2.25,'ENE':0.41,'E':0.45,'ESE':0.32,'SE':0.90,'SSE':0.62,'S':1.40,'SSW':1.19,'SW':3.08,'WSW':2.93,'W':4.62,'WNW':4.04,'NW':4.78,'NNW':3.43}
directions = dict((v,k) for k,v in directions.iteritems())
d = [3.84,1.98,2.25,0.41,0.45,0.32,0.90,0.62,1.40,1.19,3.08,2.93,4.62,4.04,4.78,3.43]
sortd = np.sort(d)
#print sortd
midp = (sortd[1:]+sortd[:-1])/2
midp = np.insert(midp,0,0)
midp = np.insert(midp,len(midp),5.0)
print midp
#for i in range(0,len(sortd)):
# print directions.get(sortd[i])
# Main program loop.
try:
while True:
GPIO.output(ledPin,0)
time.sleep(samplingTime*10.0**-6)
# The read_adc function will get the value of the specified channel
voMeasured = mcp.read_adc(an_chan)
time.sleep(deltaTime*10.0**-6)
GPIO.output(ledPin,1)
        time.sleep(sleepTime*10.0**-6)
calcVoltage = voMeasured*(5.0/1024)
c = round(calcVoltage,2)
print c
for i in range(1,len(midp)-1):
b = midp[i-1]
en = midp[i+1]
if c > 3.90 and c < 3.95:
direction = 4.78
break
elif c > b and c < en:
direction = sortd[i]
break
#dustDensity = 0.17*calcVoltage-0.1
#if dustDensity < 0:
# dustDensity = 0.00
# Print the ADC values.
print "Raw signal value (0 - 1023): ", voMeasured
print "Voltage: ", c, direction, directions.get(direction)
#print "Dust Density: ", dustDensity
time.sleep(1)
except KeyboardInterrupt:
GPIO.cleanup()<|fim▁end|>
| |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) 2013 Canonical
#
# Authors:
# Didier Roche<|fim▁hole|># Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
from contextlib import contextmanager
# this is stolen from python 3.4 :)
@contextmanager
def ignored(*exceptions):
try:
yield
except exceptions:
pass<|fim▁end|>
|
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
|
<|file_name|>setFilterParameters.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for ag-grid v3.3.3
// Project: http://www.ag-grid.com/
// Definitions by: Niall Crosby <https://github.com/ceolter/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
export interface SetFilterParameters {
    /** Same as cell renderer for grid (you can use the same one in both locations). Setting it separately here allows for the value to be rendered differently in the filter. */
cellRenderer?: Function;
/** The height of the cell. */
cellHeight?: number;
/** The values to display in the filter. */
values?: any;
/** What to do when new rows are loaded. The default is to reset the filter, as the set of values to select from can have changed. If you want to keep the selection, then set this value to 'keep'. */<|fim▁hole|> comparator?: (a: any, b: any) => number;
}<|fim▁end|>
|
newRowsAction?: string;
    /** If true, the filter will not remove items that are no longer available due to other filters. */
suppressRemoveEntries?: boolean;
/** Comparator for sorting. If not provided, the colDef comparator is used. If colDef also not provided, the default (agGrid provided) comparator is used.*/
|
<|file_name|>FileUtils.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
#
# This file is part of GNU Enterprise.
#
# GNU Enterprise is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either
# version 2, or (at your option) any later version.
#
# GNU Enterprise is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with program; see the file COPYING. If not,
# write to the Free Software Foundation, Inc., 59 Temple Place
# - Suite 330, Boston, MA 02111-1307, USA.
#
# Copyright 2001-2007 Free Software Foundation
#
# FILE:
# FileUtils.py
#
# DESCRIPTION:
# Common file/url/resource related utilities
#
# NOTES:
# TODO: Deprecate
import os<|fim▁hole|>import urllib
import urlparse
import sys
import cStringIO
# For backwards compatability
from gnue.common.utils.importing import import_string as dyn_import
from gnue.common.utils.file import to_uri as urlize, \
open_uri as openResource, \
to_buffer as openBuffer<|fim▁end|>
| |
<|file_name|>ret-bang.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// -*- rust -*-
fn my_err(s: ~str) -> ! { error2!("{:?}", s); fail2!(); }
<|fim▁hole|>pub fn main() { okay(4u); }<|fim▁end|>
|
fn okay(i: uint) -> int {
if i == 3u { my_err(~"I don't like three"); } else { return 42; }
}
|
<|file_name|>SASLOutputStream.java<|end_file_name|><|fim▁begin|>/*
* @(#)file SASLOutputStream.java
* @(#)author Sun Microsystems, Inc.
* @(#)version 1.10
* @(#)lastedit 07/03/08
* @(#)build @BUILD_TAG_PLACEHOLDER@
*
*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright (c) 2007 Sun Microsystems, Inc. All Rights Reserved.
*
* The contents of this file are subject to the terms of either the GNU General
* Public License Version 2 only ("GPL") or the Common Development and
* Distribution License("CDDL")(collectively, the "License"). You may not use
* this file except in compliance with the License. You can obtain a copy of the
* License at http://opendmk.dev.java.net/legal_notices/licenses.txt or in the
* LEGAL_NOTICES folder that accompanied this code. See the License for the
* specific language governing permissions and limitations under the License.
*
* When distributing the software, include this License Header Notice in each
* file and include the License file found at
* http://opendmk.dev.java.net/legal_notices/licenses.txt
* or in the LEGAL_NOTICES folder that accompanied this code.
* Sun designates this particular file as subject to the "Classpath" exception
* as provided by Sun in the GPL Version 2 section of the License file that
* accompanied this code.
*
* If applicable, add the following below the License Header, with the fields
* enclosed by brackets [] replaced by your own identifying information:
*
* "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* If you wish your version of this file to be governed by only the CDDL or
* only the GPL Version 2, indicate your decision by adding
*
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license."
*
* If you don't indicate a single choice of license, a recipient has the option
* to distribute your version of this file under either the CDDL or the GPL
* Version 2, or to extend the choice of license to its licensees as provided
* above. However, if you add GPL Version 2 code and therefore, elected the
* GPL Version 2 license, then the option applies only if the new code is made
* subject to such option by the copyright holder.
*
*/
package com.sun.jmx.remote.opt.security;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslServer;
import java.io.IOException;
import java.io.OutputStream;
import com.sun.jmx.remote.opt.util.ClassLogger;
public class SASLOutputStream extends OutputStream {
private int rawSendSize = 65536;
private byte[] lenBuf = new byte[4]; // buffer for storing length
private OutputStream out; // underlying output stream
private SaslClient sc;
private SaslServer ss;
public SASLOutputStream(SaslClient sc, OutputStream out)
throws IOException {
super();
this.out = out;
this.sc = sc;
this.ss = null;
String str = (String) sc.getNegotiatedProperty(Sasl.RAW_SEND_SIZE);
if (str != null) {
try {
rawSendSize = Integer.parseInt(str);
} catch (NumberFormatException e) {
throw new IOException(Sasl.RAW_SEND_SIZE +
" property must be numeric string: " + str);
}
}
}
public SASLOutputStream(SaslServer ss, OutputStream out)
throws IOException {
super();
this.out = out;
this.ss = ss;
this.sc = null;
String str = (String) ss.getNegotiatedProperty(Sasl.RAW_SEND_SIZE);
if (str != null) {
try {
rawSendSize = Integer.parseInt(str);
} catch (NumberFormatException e) {
throw new IOException(Sasl.RAW_SEND_SIZE +
" property must be numeric string: " + str);
}
}
}
public void write(int b) throws IOException {
byte[] buffer = new byte[1];
buffer[0] = (byte)b;
write(buffer, 0, 1);
}
public void write(byte[] buffer, int offset, int total) throws IOException {
int count;
byte[] wrappedToken, saslBuffer;
// "Packetize" buffer to be within rawSendSize
if (logger.traceOn()) {
logger.trace("write", "Total size: " + total);
}
for (int i = 0; i < total; i += rawSendSize) {
// Calculate length of current "packet"
count = (total - i) < rawSendSize ? (total - i) : rawSendSize;
// Generate wrapped token
if (sc != null)
wrappedToken = sc.wrap(buffer, offset+i, count);
else
wrappedToken = ss.wrap(buffer, offset+i, count);
// Write out length
intToNetworkByteOrder(wrappedToken.length, lenBuf, 0, 4);
if (logger.traceOn()) {
logger.trace("write", "sending size: " + wrappedToken.length);
}
out.write(lenBuf, 0, 4);
// Write out wrapped token<|fim▁hole|> public void close() throws IOException {
if (sc != null)
sc.dispose();
else
ss.dispose();
out.close();
}
/**
* Encodes an integer into 4 bytes in network byte order in the buffer
* supplied.
*/
private void intToNetworkByteOrder(int num, byte[] buf,
int start, int count) {
if (count > 4) {
throw new IllegalArgumentException("Cannot handle more " +
"than 4 bytes");
}
for (int i = count-1; i >= 0; i--) {
buf[start+i] = (byte)(num & 0xff);
num >>>= 8;
}
}
private static final ClassLogger logger =
new ClassLogger("javax.management.remote.misc", "SASLOutputStream");
}<|fim▁end|>
|
out.write(wrappedToken, 0, wrappedToken.length);
}
}
|
<|file_name|>TableDialog.js<|end_file_name|><|fim▁begin|>define(
({
insertTableTitle: "Insertar tabla",
modifyTableTitle: "Modificar tabla",
rows: "Filas:",
columns: "Columnas:",
align: "Alinear:",
cellPadding: "Relleno de celda:",
cellSpacing: "Espaciado de celda:",
tableWidth: "Ancho de tabla:",
backgroundColor: "Color de fondo:",
borderColor: "Color de borde:",
borderThickness: "Grosor del borde:",<|fim▁hole|> percent: "por ciento",
pixels: "píxeles",
"default": "default",
left: "izquierda",
center: "centro",
right: "derecha",
buttonSet: "Establecer", // translated elsewhere?
buttonInsert: "Insertar",
buttonCancel: "Cancelar",
selectTableLabel: "Seleccionar tabla",
insertTableRowBeforeLabel: "Añadir fila antes",
insertTableRowAfterLabel: "Añadir fila después",
insertTableColumnBeforeLabel: "Añadir columna antes",
insertTableColumnAfterLabel: "Añadir columna después",
deleteTableRowLabel: "Suprimir fila",
deleteTableColumnLabel: "Suprimir columna"
})
);<|fim▁end|>
| |
<|file_name|>middleware.test.js<|end_file_name|><|fim▁begin|>import { LOCAL_STORAGE_REMOVE_ITEM, LOCAL_STORAGE_SET_ITEM } from './actionTypes'
import createMiddleware from './middleware'
describe('middleware', () => {
let removeItem
let setItem
let middleware
let next
let store
beforeEach(() => {<|fim▁hole|> removeItem,
setItem,
})
next = jest.fn()
store = {
dispatch: jest.fn(),
getState: jest.fn(),
}
})
it('calls next on dummy actionType', () => {
const action = {
type: 'dummyType',
payload: {
key: 'key',
},
}
middleware(store)(next)(action)
expect(next.mock.calls.length).toBe(1)
})
it(`calls removeItem on ${LOCAL_STORAGE_REMOVE_ITEM}`, () => {
const action = {
type: LOCAL_STORAGE_REMOVE_ITEM,
payload: {
key: 'key',
},
}
middleware(store)(next)(action)
expect(removeItem.mock.calls.length).toBe(1)
})
it(`calls removeItem on ${LOCAL_STORAGE_SET_ITEM}`, () => {
const action = {
type: LOCAL_STORAGE_SET_ITEM,
payload: {
key: 'key',
value: 'value',
},
}
middleware(store)(next)(action)
expect(setItem.mock.calls.length).toBe(1)
})
})<|fim▁end|>
|
removeItem = jest.fn()
setItem = jest.fn()
middleware = createMiddleware({
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::fs;
use std::path;
use std::io::Read;
use serialize::json;
use valico::json_schema;
fn visit_specs<F>(dir: &path::Path, cb: F) where F: Fn(&path::Path, json::Json) {
let contents = fs::read_dir(dir).ok().unwrap();
for entry in contents {
let path = entry.unwrap().path();
match fs::File::open(&path) {
Err(_) => continue,
Ok(mut file) => {
let metadata = file.metadata().unwrap();
if metadata.is_file() {
let mut content = String::new();
file.read_to_string(&mut content).ok().unwrap();
let json: json::Json = content.parse().unwrap();
cb(&path, json);
}
}
}
}
}
#[test]
fn test_suite() {
let mut content = String::new();
fs::File::open(&path::Path::new("tests/schema/schema.json")).ok().unwrap()
.read_to_string(&mut content).ok().unwrap();
let json_v4_schema: json::Json = content.parse().unwrap();
visit_specs(&path::Path::new("tests/schema/JSON-Schema-Test-Suite/tests/draft4"), |path, spec_set: json::Json| {
let spec_set = spec_set.as_array().unwrap();
let exceptions: Vec<(String, String)> = vec![
("maxLength.json".to_string(), "two supplementary Unicode code points is long enough".to_string()),
("minLength.json".to_string(), "one supplementary Unicode code point is not long enough".to_string()),
("refRemote.json".to_string(), "remote ref invalid".to_string()),
("refRemote.json".to_string(), "remote fragment invalid".to_string()),
("refRemote.json".to_string(), "ref within ref invalid".to_string()),
("refRemote.json".to_string(), "changed scope ref invalid".to_string()),
];
for spec in spec_set.iter() {
let spec = spec.as_object().unwrap();
let mut scope = json_schema::Scope::new();
scope.compile(json_v4_schema.clone(), true).ok().unwrap();
let schema = match scope.compile_and_return(spec.get("schema").unwrap().clone(), false) {
Ok(schema) => schema,
Err(err) => panic!("Error in schema {} {}: {:?}",
path.file_name().unwrap().to_str().unwrap(),
spec.get("description").unwrap().as_string().unwrap(),
err
)
};
let tests = spec.get("tests").unwrap().as_array().unwrap();
for test in tests.iter() {
let test = test.as_object().unwrap();
let description = test.get("description").unwrap().as_string().unwrap();
let data = test.get("data").unwrap();
let valid = test.get("valid").unwrap().as_boolean().unwrap();
<|fim▁hole|> if !&exceptions[..].contains(&(path.file_name().unwrap().to_str().unwrap().to_string(), description.to_string())) {
panic!("Failure: \"{}\" in {}",
path.file_name().unwrap().to_str().unwrap(),
description.to_string());
}
} else {
println!("test json_schema::test_suite -> {} .. ok", description);
}
}
}
})
}<|fim▁end|>
|
let state = schema.validate(&data);
if state.is_valid() != valid {
|
<|file_name|>cli.rs<|end_file_name|><|fim▁begin|>use clap::{App, AppSettings, Arg, SubCommand};
use std::path::Path;
pub fn app<'a, 'b>() -> App<'a, 'b> {
App::new("homers")
.version(crate_version!())
.author("Jacob Helwig <[email protected]>")
.about("Manage dotfiles in your home directory.")
.setting(AppSettings::SubcommandRequiredElseHelp)
.setting(AppSettings::VersionlessSubcommands)
.arg(Arg::with_name("quiet")
.short("q")
.long("quiet")
.help("Suppress output"))
.arg(Arg::with_name("verbose")
.short("v")
.long("verbose")
.multiple(true)
.overrides_with("quiet")
.help("Sets the level of verbosity"))
.arg(Arg::with_name("pretend")
.short("p")
.long("pretend")
.help("Show what would be done, instead of making changes"))
.arg(Arg::with_name("homers-dir")
.short("d")
.long("homers-dir")
.help("Path to where the repositories are stored (Default: ~/.homers)")
.takes_value(true)
.value_name("dir")
.validator(|val: String| -> Result<(), String> {
if Path::new(&val).exists() {
Ok(())
} else {
Err(format!("Path does not exist for --homers-dir: {}", val))
}
}))
.subcommand(command_cd())
.subcommand(command_clone())
.subcommand(command_commit())
.subcommand(command_destroy())
.subcommand(command_diff())
.subcommand(command_edit())
.subcommand(command_exec())
.subcommand(command_exec_all())
.subcommand(command_generate())
.subcommand(command_link())
.subcommand(command_list())
.subcommand(command_path())
.subcommand(command_pull())
.subcommand(command_push())
.subcommand(command_rc())
.subcommand(command_status())
.subcommand(command_track())
.subcommand(command_unlink())
}
fn command_cd<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("cd")
.about("Open a shell at the root of the given repository")
.arg(Arg::with_name("repository")
.help("The repository to open a shell in")
.index(1)
.required(true))
}
fn command_clone<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("clone")
.about("Clone <uri>")
.arg(Arg::with_name("uri")
.help("URI of the repository to clone")
.index(1)
.required(true))
.arg(Arg::with_name("name")
.help("The name to use for the repository locally")
.index(2)
.required(true))
}
fn command_commit<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("commit")
.about("Comment changes in the given repository")
.arg(Arg::with_name("repository")
.help("Name of the repository in which to commit changes")
.index(1)
.required(true))
.arg(Arg::with_name("message")
.help("The commit message to use")
.index(2)
.required(true))
}
fn command_destroy<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("destroy")
.about("Delete all symlinks and remove the repository")
.arg(Arg::with_name("repository")
.help("Name of the repository to destroy")
.index(1)
.required(true))
}
fn command_diff<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("diff")
.about("Show the \"git diff\" of uncommitted changes in the repository")
.arg(Arg::with_name("repository")
.help("Name of the repository to \"git diff\"")
.index(1)
.required(true))
}
fn command_edit<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("edit")
.about("Open $EDITOR in the root of the given repository")
.arg(Arg::with_name("repository")
.help("The repository to open in $EDITOR")
.index(1)
.required(true))
}
fn command_exec<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("exec")
.about("Execute the given command in the root of the specified repository")
.arg(Arg::with_name("repository")
.help("Name of the repository in which to execute the command")
.index(1)
.required(true))
.arg(Arg::with_name("command")
.help("The command to execute")
.index(2)
.required(true))
}
fn command_exec_all<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("exec_all")
.about("Execute the given command in the root of the specified repository")
.arg(Arg::with_name("command")
.help("The command to execute")
.index(1)
.required(true))
}
fn command_generate<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("generate")
.about("Create a homers compatible git repository at the specified path")
.arg(Arg::with_name("path")
.help("The path to initialize with a new repository")
.index(1)
.required(true))
}
fn command_link<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("link")
//Runtime options:
//-f, [--force] # Overwrite files that already exist
//-s, [--skip], [--no-skip] # Skip files that already exist
.about("Symlink all dotfiles from the specified repository")
.arg(Arg::with_name("repository")
.help("The repository to create the symlinks for")
.index(1)
.required(true))
}
fn command_list<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("list").about("List all cloned repositories")
}
fn command_path<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("path")
.about("Print the path to the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to show the path to")
.index(1)
.required(true))
}
fn command_pull<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("pull")
.about("Run \"git pull\" in the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to run \"git pull\" in")
.index(1)
.required(true))
}
fn command_push<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("push")
.about("Run \"git push\" in the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to run \"git push\" in")
.index(1)
.required(true))
}
fn command_rc<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("rc")
.about("Run the .homersrc from the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to run the .homersrc from")
.index(1)
.required(true))
}
fn command_status<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("status")
.about("Run \"git status\" in the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to run \"git status\" in")
.index(1)
.required(true))
}
fn command_track<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("track")
.about("Add a file to the specified repository")
.arg(Arg::with_name("path")
.help("The path to add to the repository and make a symlink")
.index(1)
.required(true))<|fim▁hole|> .index(2)
.required(true))
}
fn command_unlink<'a, 'b>() -> App<'a, 'b> {
SubCommand::with_name("unlink")
.about("Remove all symlinks to the specified repository")
.arg(Arg::with_name("repository")
.help("The name of the repository to remove the symlinks for")
.index(1)
.required(true))
}<|fim▁end|>
|
.arg(Arg::with_name("repository")
.help("The name of the repository to add the path to")
|
<|file_name|>startPythonFromMsg.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import sys
from LAPS.MsgBus.Bus import Bus
# Create queue with a unique name<|fim▁hole|>
# insert message
# receive msg
# delete queue
if __name__ == "__main__":
# If invoked directly, parse command line arguments for logger information
# and pass the rest to the run() method defined above
# --------------------------------------------------------------------------
try:
unique_queue_name = sys.argv[1]
except:
print "Not enough command line arguments: this test needs a unique queue name"
exit(1)
#msgbus = Bus(broker="lhd002", address=unique_queue_name)
#parset = """
#key=value
#"""
#msgbus.send(parset,"Observation123456")<|fim▁end|>
| |
<|file_name|>alpha.py<|end_file_name|><|fim▁begin|>from interface.design.ui_screen import Ui_wnd_gifextract
from PyQt5 import QtWidgets
import sys
import listener
import config
import ffmpeg
import queue
import interface.menus.Frame_CreateGif
import interface.menus.Frame_ExtractFrames
import interface.menus.Frame_Queue
class Screen(QtWidgets.QMainWindow):
def __init__(self, parent=None):
def setupFFMpeg():
self.ffmpeg = ffmpeg.FFmpeg(self.config)
def setupConfig():
self.config = config.Config(self)
def setupQueue():
self.queue = queue.JobQueue(self)
def setupTabs():
self.tab_video = interface.menus.Frame_ExtractFrames.Frame(self)
self.ui.tabWidget.addTab(self.tab_video, "Frame Extraction")
self.tab_gif = interface.menus.Frame_CreateGif.Frame(self)
self.ui.tabWidget.addTab(self.tab_gif, "Gif Creation")
self.tab_queue = interface.menus.Frame_Queue.Frame(self)
self.ui.tabWidget.addTab(self.tab_queue, "Queue")
QtWidgets.QWidget.__init__(self, parent)
self.ui = Ui_wnd_gifextract()
self.ui.setupUi(self)
self.slots = listener.Slots(self)
self.createLinks()
setupConfig()
setupTabs()
setupFFMpeg()
setupQueue()
<|fim▁hole|> def createLinks(self):
self.ui.actionPreferences.triggered.connect(self.openOptions)
def openOptions(self):
import interface.menus.ConfigMenu
options = interface.menus.ConfigMenu.ConfigMenu(self, self.config)
options.show()
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
program = Screen()
program.show()
sys.exit(app.exec_())<|fim▁end|>
| |
<|file_name|>user-settings-mediator.js<|end_file_name|><|fim▁begin|>define([
'./user-settings'
], function (userSettings) {
var context;
var exposed = {
init: function(thisContext){
context = thisContext;
context.sandbox.on('settings.close', userSettings.close);
context.sandbox.on('settings.open', userSettings.open);
context.sandbox.on('menu.opening', userSettings.handleMenuOpening);
context.sandbox.on('data.clear.all', userSettings.clear);
},
publishMessage: function(params) {
context.sandbox.emit('message.publish', params);
},
publishOpening: function(params){
context.sandbox.emit('menu.opening', params);
},
zoomToLocation: function(params){
context.sandbox.emit('map.zoom.toLocation',params);
},
changeBasemap: function(params) {
context.sandbox.emit('map.basemap.change', params);
},
closeUserSettings: function() {<|fim▁hole|> }
};
return exposed;
});<|fim▁end|>
|
context.sandbox.emit('settings.close');
},
openUserSettings: function() {
context.sandbox.emit('settings.open');
|
<|file_name|>component.js<|end_file_name|><|fim▁begin|>import type { Config } from '../src/core/config'
import type VNode from '../src/core/vdom/vnode'
import type Watcher from '../src/core/observer/watcher'
declare interface Component {
// constructor information
static cid: number;
static options: Object;
// extend
static extend: (options: Object) => Function;
static superOptions: Object;
static extendOptions: Object;
static sealedOptions: Object;
static super: Class<Component>;
// assets
static directive: (id: string, def?: Function | Object) => Function | Object | void;
static component: (id: string, def?: Class<Component> | Object) => Class<Component>;
static filter: (id: string, def?: Function) => Function | void;
// public properties
$el: any; // so that we can attach __vue__ to it
$data: Object;
$options: ComponentOptions;
$parent: Component | void;
$root: Component;
$children: Array<Component>;
$refs: { [key: string]: Component | Element | Array<Component | Element> | void };
$slots: { [key: string]: Array<VNode> };
$scopedSlots: { [key: string]: () => VNodeChildren };
$vnode: VNode; // the placeholder node for the component in parent's render tree
$isServer: boolean;
$props: Object;
// public methods
$mount: (el?: Element | string, hydrating?: boolean) => Component;
$forceUpdate: () => void;
$destroy: () => void;
$set: <T>(target: Object | Array<T>, key: string | number, val: T) => T;
$delete: <T>(target: Object | Array<T>, key: string | number) => void;
$watch: (expOrFn: string | Function, cb: Function, options?: Object) => Function;
$on: (event: string | Array<string>, fn: Function) => Component;
$once: (event: string, fn: Function) => Component;
$off: (event?: string | Array<string>, fn?: Function) => Component;
$emit: (event: string, ...args: Array<mixed>) => Component;
$nextTick: (fn: Function) => void | Promise<*>;
$createElement: (tag?: string | Component, data?: Object, children?: VNodeChildren) => VNode;
// private properties
_uid: number;
_name: string; // this only exists in dev mode
_isVue: true;
_self: Component;
_renderProxy: Component;
_renderContext: ?Component;
_watcher: Watcher;
_watchers: Array<Watcher>;
_computedWatchers: { [key: string]: Watcher };
_data: Object;
_props: Object;
_events: Object;
_inactive: boolean | null;
_directInactive: boolean;
_isMounted: boolean;
_isDestroyed: boolean;
_isBeingDestroyed: boolean;
_vnode: ?VNode; // self root node
_staticTrees: ?Array<VNode>;
_hasHookEvent: boolean;
_provided: ?Object;
// private methods
// lifecycle
_init: Function;
_mount: (el?: Element | void, hydrating?: boolean) => Component;
_update: (vnode: VNode, hydrating?: boolean) => void;
// rendering
_render: () => VNode;
__patch__: (a: Element | VNode | void, b: VNode) => any;
// createElement
// _c is internal that accepts `normalizationType` optimization hint
_c: (vnode?: VNode, data?: VNodeData, children?: VNodeChildren, normalizationType?: number) => VNode | void;
// renderStatic
_m: (index: number, isInFor?: boolean) => VNode | VNodeChildren;
// markOnce
_o: (vnode: VNode | Array<VNode>, index: number, key: string) => VNode | VNodeChildren;
// toString
_s: (value: mixed) => string;
// text to VNode
_v: (value: string | number) => VNode;
// toNumber<|fim▁hole|> // loose equal
_q: (a: mixed, b: mixed) => boolean;
// loose indexOf
_i: (arr: Array<mixed>, val: mixed) => number;
// resolveFilter
_f: (id: string) => Function;
// renderList
_l: (val: mixed, render: Function) => ?Array<VNode>;
// renderSlot
_t: (name: string, fallback: ?Array<VNode>, props: ?Object) => ?Array<VNode>;
// apply v-bind object
_b: (data: any, value: any, asProp?: boolean) => VNodeData;
// check custom keyCode
_k: (eventKeyCode: number, key: string, builtInAlias: number | Array<number> | void) => boolean;
// resolve scoped slots
_u: (scopedSlots: ScopedSlotsData, res?: Object) => { [key: string]: Function };
// allow dynamic method registration
[key: string]: any
}<|fim▁end|>
|
_n: (value: string) => number | string;
// empty vnode
_e: () => VNode;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
PostgreSQL Session API
======================
The Session classes wrap the Queries :py:class:`Session <queries.Session>` and
:py:class:`TornadoSession <queries.tornado_session.TornadoSession>` classes
providing environment variable based configuration.
Environment variables should be set using the ``PGSQL[_DBNAME]`` format
where the value is a PostgreSQL URI.
For PostgreSQL URI format, see:
http://www.postgresql.org/docs/9.3/static/libpq-connect.html#LIBPQ-CONNSTRING
As example, given the environment variable:
.. code:: python
PGSQL_FOO = 'postgresql://bar:baz@foohost:6000/foo'
and code for creating a :py:class:`Session` instance for the database name
``foo``:
.. code:: python<|fim▁hole|>running on ``foohost``, port ``6000`` using the username ``bar`` and the
password ``baz``, connecting to the ``foo`` database.
"""
version_info = (2, 0, 1)
__version__ = '.'.join(str(v) for v in version_info)
import logging
import os
from queries import pool
import queries
from queries import tornado_session
_ARGUMENTS = ['host', 'port', 'dbname', 'user', 'password']
LOGGER = logging.getLogger(__name__)
# For ease of access to different cursor types
from queries import DictCursor
from queries import NamedTupleCursor
from queries import RealDictCursor
from queries import LoggingCursor
from queries import MinTimeLoggingCursor
# Expose exceptions so clients do not need to import queries as well
from queries import DataError
from queries import DatabaseError
from queries import IntegrityError
from queries import InterfaceError
from queries import InternalError
from queries import NotSupportedError
from queries import OperationalError
from queries import ProgrammingError
from queries import QueryCanceledError
from queries import TransactionRollbackError
def _get_uri(dbname):
"""Return the URI for the specified database name from an environment
variable. If dbname is blank, the ``PGSQL`` environment variable is used,
otherwise the database name is cast to upper case and concatenated to
``PGSQL_`` and the URI is retrieved from ``PGSQL_DBNAME``. For example,
if the value ``foo`` is passed in, the environment variable used would be
``PGSQL_FOO``.
:param str dbname: The database name to construct the URI for
:return: str
:raises: KeyError
"""
if not dbname:
return os.environ['PGSQL']
return os.environ['PGSQL_{0}'.format(dbname).upper()]
class Session(queries.Session):
"""Extends queries.Session using configuration data that is stored
in environment variables.
Utilizes connection pooling to ensure that multiple concurrent asynchronous
queries do not block each other. Heavily trafficked services will require
a higher ``max_pool_size`` to allow for greater connection concurrency.
:param str dbname: PostgreSQL database name
:param queries.cursor: The cursor type to use
:param int pool_idle_ttl: How long idle pools keep connections open
:param int pool_max_size: The maximum size of the pool to use
:param str db_url: Optional database connection URL. Use this when
you need to connect to a database that is only known at runtime.
"""
def __init__(self, dbname,
cursor_factory=queries.RealDictCursor,
pool_idle_ttl=pool.DEFAULT_IDLE_TTL,
pool_max_size=pool.DEFAULT_MAX_SIZE,
db_url=None):
if db_url is None:
db_url = _get_uri(dbname)
super(Session, self).__init__(db_url,
cursor_factory,
pool_idle_ttl,
pool_max_size)
class TornadoSession(tornado_session.TornadoSession):
"""Extends queries.TornadoSession using configuration data that is stored
in environment variables.
Utilizes connection pooling to ensure that multiple concurrent asynchronous
queries do not block each other. Heavily trafficked services will require
a higher ``max_pool_size`` to allow for greater connection concurrency.
:py:meth:`query <queries.tornado_session.TornadoSession.query>` and
:py:meth:`callproc <queries.tornado_session.TornadoSession.callproc>` must
call :py:meth:`Results.free <queries.tornado_session.Results.free>`
:param str dbname: PostgreSQL database name
:param queries.cursor: The cursor type to use
:param int pool_idle_ttl: How long idle pools keep connections open
:param int pool_max_size: The maximum size of the pool to use
:param tornado.ioloop.IOLoop ioloop: Pass in the instance of the tornado
IOLoop you would like to use. Defaults to the global instance.
:param str db_url: Optional database connection URL. Use this when
you need to connect to a database that is only known at runtime.
"""
def __init__(self, dbname,
cursor_factory=queries.RealDictCursor,
pool_idle_ttl=pool.DEFAULT_IDLE_TTL,
pool_max_size=tornado_session.DEFAULT_MAX_POOL_SIZE,
io_loop=None, db_url=None):
if db_url is None:
db_url = _get_uri(dbname)
super(TornadoSession, self).__init__(db_url,
cursor_factory,
pool_idle_ttl,
pool_max_size,
io_loop)<|fim▁end|>
|
session = sprockets.postgresql.Session('foo')
A :py:class:`queries.Session` object will be created that connects to Postgres
|
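Editor's note on the row above: the wrapper resolves its PostgreSQL URI purely from the PGSQL / PGSQL_<DBNAME> environment variables, so a complete usage sketch only needs the variable plus the database name (the values and import path below are assumptions for illustration):

import os

# The database name "foo" maps to the environment variable PGSQL_FOO.
os.environ['PGSQL_FOO'] = 'postgresql://bar:baz@foohost:6000/foo'

from sprockets import postgresql  # import path assumed from the snippet above

session = postgresql.Session('foo')                   # blocking session
# tornado_session = postgresql.TornadoSession('foo')  # asynchronous variant, same lookup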
<|file_name|>average_checkpoints.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import collections
import os
import re
import torch
from fairseq.file_io import PathManager
def average_checkpoints(inputs):
"""Loads checkpoints from inputs and returns a model with averaged weights.
Args:
inputs: An iterable of string paths of checkpoints to load from.
Returns:
A dict of string keys mapping to various values. The 'model' key
from the returned dict should correspond to an OrderedDict mapping
string parameter names to torch Tensors.
"""
params_dict = collections.OrderedDict()
params_keys = None
new_state = None
num_models = len(inputs)
for fpath in inputs:
with PathManager.open(fpath, "rb") as f:
state = torch.load(
f,
map_location=(
lambda s, _: torch.serialization.default_restore_location(s, "cpu")
),
)
# Copies over the settings from the first checkpoint
if new_state is None:
new_state = state
model_params = state["model"]
model_params_keys = list(model_params.keys())
if params_keys is None:
params_keys = model_params_keys
elif params_keys != model_params_keys:
raise KeyError(
"For checkpoint {}, expected list of params: {}, "
"but found: {}".format(f, params_keys, model_params_keys)
)
for k in params_keys:
p = model_params[k]
if isinstance(p, torch.HalfTensor):
p = p.float()
if k not in params_dict:
params_dict[k] = p.clone()
# NOTE: clone() is needed in case of p is a shared parameter
else:
params_dict[k] += p
averaged_params = collections.OrderedDict()
for k, v in params_dict.items():
averaged_params[k] = v
if averaged_params[k].is_floating_point():
averaged_params[k].div_(num_models)
else:
averaged_params[k] //= num_models
new_state["model"] = averaged_params
return new_state
def last_n_checkpoints(paths, n, update_based, upper_bound=None):
assert len(paths) == 1
path = paths[0]
if update_based:
pt_regexp = re.compile(r"checkpoint_\d+_(\d+)\.pt")
else:
pt_regexp = re.compile(r"checkpoint(\d+)\.pt")
files = PathManager.ls(path)
entries = []
for f in files:
m = pt_regexp.fullmatch(f)
if m is not None:
sort_key = int(m.group(1))
if upper_bound is None or sort_key <= upper_bound:
entries.append((sort_key, m.group(0)))
if len(entries) < n:
raise Exception(
"Found {} checkpoint files but need at least {}", len(entries), n
)
return [os.path.join(path, x[1]) for x in sorted(entries, reverse=True)[:n]]
def main():
parser = argparse.ArgumentParser(
description="Tool to average the params of input checkpoints to "
"produce a new checkpoint",
)
# fmt: off
parser.add_argument('--inputs', required=True, nargs='+',
help='Input checkpoint file paths.')
parser.add_argument('--output', required=True, metavar='FILE',
help='Write the new checkpoint containing the averaged weights to this path.')
num_group = parser.add_mutually_exclusive_group()
num_group.add_argument('--num-epoch-checkpoints', type=int,
help='if set, will try to find checkpoints with names checkpoint_xx.pt in the '
'path specified by input, and average last this many of them.')
num_group.add_argument('--num-update-checkpoints', type=int,
help='if set, will try to find checkpoints with names checkpoint_ee_xx.pt in the path specified by'
' input, and average last this many of them.')
parser.add_argument('--checkpoint-upper-bound', type=int,
help='when using --num-epoch-checkpoints, this will set an upper bound on which epoch to use, '
'when using --num-update-checkpoints, this will set an upper bound on which update to use'
'e.g., with --num-epoch-checkpoints=10 --checkpoint-upper-bound=50, checkpoints 41-50 would be'
' averaged.'
'e.g., with --num-update-checkpoints=10 --checkpoint-upper-bound=50000, checkpoints 40500-50000 would'
' be averaged assuming --save-interval-updates 500'
)
# fmt: on
args = parser.parse_args()
print(args)
<|fim▁hole|> is_update_based = False
if args.num_update_checkpoints is not None:
num = args.num_update_checkpoints
is_update_based = True
elif args.num_epoch_checkpoints is not None:
num = args.num_epoch_checkpoints
assert args.checkpoint_upper_bound is None or (
args.num_epoch_checkpoints is not None
or args.num_update_checkpoints is not None
), "--checkpoint-upper-bound requires --num-epoch-checkpoints or --num-update-checkpoints"
assert (
args.num_epoch_checkpoints is None or args.num_update_checkpoints is None
), "Cannot combine --num-epoch-checkpoints and --num-update-checkpoints"
if num is not None:
args.inputs = last_n_checkpoints(
args.inputs,
num,
is_update_based,
upper_bound=args.checkpoint_upper_bound,
)
print("averaging checkpoints: ", args.inputs)
new_state = average_checkpoints(args.inputs)
with PathManager.open(args.output, "wb") as f:
torch.save(new_state, f)
print("Finished writing averaged checkpoint to {}".format(args.output))
if __name__ == "__main__":
main()<|fim▁end|>
|
num = None
|
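Editor's note on the row above: the heart of the script is a running per-parameter sum followed by division by the number of checkpoints (float tensors divided, integer tensors floor-divided). A stripped-down sketch of that averaging step (illustrative only, not the fairseq API):

import collections
import torch

def average_state_dicts(state_dicts):
    # Sum every parameter tensor across checkpoints, then divide by the count.
    totals = collections.OrderedDict()
    for state in state_dicts:
        for name, tensor in state.items():
            tensor = tensor.float() if isinstance(tensor, torch.HalfTensor) else tensor
            totals[name] = tensor.clone() if name not in totals else totals[name] + tensor
    n = len(state_dicts)
    return collections.OrderedDict(
        (name, total / n if total.is_floating_point() else total // n)
        for name, total in totals.items()
    )

a = {"w": torch.tensor([1.0, 3.0])}
b = {"w": torch.tensor([3.0, 5.0])}
print(average_state_dicts([a, b]))  # OrderedDict([('w', tensor([2., 4.]))])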
<|file_name|>bootstrapslider.js<|end_file_name|><|fim▁begin|>jQuery('#bootstrapslider').carousel({<|fim▁hole|><|fim▁end|>
|
interval: bootstrapslider_script_vars.interval,
pause: bootstrapslider_script_vars.pause,
wrap: bootstrapslider_script_vars.wrap
});
|
<|file_name|>HttpConnectorFactoryTest.java<|end_file_name|><|fim▁begin|>package io.dropwizard.jetty;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.jetty9.InstrumentedConnectionFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.dropwizard.configuration.ResourceConfigurationSourceProvider;
import io.dropwizard.configuration.YamlConfigurationFactory;
import io.dropwizard.jackson.DiscoverableSubtypeResolver;
import io.dropwizard.jackson.Jackson;
import io.dropwizard.logging.ConsoleAppenderFactory;
import io.dropwizard.logging.FileAppenderFactory;
import io.dropwizard.logging.SyslogAppenderFactory;
import io.dropwizard.util.DataSize;
import io.dropwizard.util.Duration;
import io.dropwizard.validation.BaseValidator;
import org.assertj.core.api.InstanceOfAssertFactories;
import org.eclipse.jetty.http.CookieCompliance;
import org.eclipse.jetty.http.HttpCompliance;
import org.eclipse.jetty.server.ForwardedRequestCustomizer;
import org.eclipse.jetty.server.HttpConfiguration;
import org.eclipse.jetty.server.HttpConnectionFactory;
import org.eclipse.jetty.server.ProxyConnectionFactory;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
import org.eclipse.jetty.util.thread.ScheduledExecutorScheduler;
import org.eclipse.jetty.util.thread.ThreadPool;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import javax.validation.Validator;
import java.util.Optional;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
class HttpConnectorFactoryTest {
private final ObjectMapper objectMapper = Jackson.newObjectMapper();
private final Validator validator = BaseValidator.newValidator();
@BeforeEach
void setUp() {
objectMapper.getSubtypeResolver().registerSubtypes(ConsoleAppenderFactory.class,
FileAppenderFactory.class, SyslogAppenderFactory.class, HttpConnectorFactory.class);
}
@Test
void isDiscoverable() {
assertThat(new DiscoverableSubtypeResolver().getDiscoveredSubtypes())
.contains(HttpConnectorFactory.class);
}
@Test
void testParseMinimalConfiguration() throws Exception {
HttpConnectorFactory http =
new YamlConfigurationFactory<>(HttpConnectorFactory.class, validator, objectMapper, "dw")
.build(new ResourceConfigurationSourceProvider(), "yaml/http-connector-minimal.yml");
assertThat(http.getPort()).isEqualTo(8080);
assertThat(http.getBindHost()).isNull();
assertThat(http.isInheritChannel()).isFalse();
assertThat(http.getHeaderCacheSize()).isEqualTo(DataSize.bytes(512));
assertThat(http.getOutputBufferSize()).isEqualTo(DataSize.kibibytes(32));
assertThat(http.getMaxRequestHeaderSize()).isEqualTo(DataSize.kibibytes(8));
assertThat(http.getMaxResponseHeaderSize()).isEqualTo(DataSize.kibibytes(8));
assertThat(http.getInputBufferSize()).isEqualTo(DataSize.kibibytes(8));
assertThat(http.getIdleTimeout()).isEqualTo(Duration.seconds(30));
assertThat(http.getMinBufferPoolSize()).isEqualTo(DataSize.bytes(64));
assertThat(http.getBufferPoolIncrement()).isEqualTo(DataSize.bytes(1024));
assertThat(http.getMaxBufferPoolSize()).isEqualTo(DataSize.kibibytes(64));
assertThat(http.getMinRequestDataPerSecond()).isEqualTo(DataSize.bytes(0));
assertThat(http.getMinResponseDataPerSecond()).isEqualTo(DataSize.bytes(0));
assertThat(http.getAcceptorThreads()).isEmpty();
assertThat(http.getSelectorThreads()).isEmpty();
assertThat(http.getAcceptQueueSize()).isNull();
assertThat(http.isReuseAddress()).isTrue();
assertThat(http.isUseServerHeader()).isFalse();
assertThat(http.isUseDateHeader()).isTrue();
assertThat(http.isUseForwardedHeaders()).isFalse();
assertThat(http.getHttpCompliance()).isEqualTo(HttpCompliance.RFC7230);
assertThat(http.getRequestCookieCompliance()).isEqualTo(CookieCompliance.RFC6265);
assertThat(http.getResponseCookieCompliance()).isEqualTo(CookieCompliance.RFC6265);
}
@Test
void testParseFullConfiguration() throws Exception {
HttpConnectorFactory http =
new YamlConfigurationFactory<>(HttpConnectorFactory.class, validator, objectMapper, "dw")
.build(new ResourceConfigurationSourceProvider(), "yaml/http-connector.yml");
assertThat(http.getPort()).isEqualTo(9090);
assertThat(http.getBindHost()).isEqualTo("127.0.0.1");
assertThat(http.isInheritChannel()).isTrue();
assertThat(http.getHeaderCacheSize()).isEqualTo(DataSize.bytes(256));
assertThat(http.getOutputBufferSize()).isEqualTo(DataSize.kibibytes(128));
assertThat(http.getMaxRequestHeaderSize()).isEqualTo(DataSize.kibibytes(4));
assertThat(http.getMaxResponseHeaderSize()).isEqualTo(DataSize.kibibytes(4));
assertThat(http.getInputBufferSize()).isEqualTo(DataSize.kibibytes(4));
assertThat(http.getIdleTimeout()).isEqualTo(Duration.seconds(10));
assertThat(http.getMinBufferPoolSize()).isEqualTo(DataSize.bytes(128));
assertThat(http.getBufferPoolIncrement()).isEqualTo(DataSize.bytes(500));
assertThat(http.getMaxBufferPoolSize()).isEqualTo(DataSize.kibibytes(32));
assertThat(http.getMinRequestDataPerSecond()).isEqualTo(DataSize.bytes(42));
assertThat(http.getMinResponseDataPerSecond()).isEqualTo(DataSize.bytes(200));
assertThat(http.getAcceptorThreads()).contains(1);
assertThat(http.getSelectorThreads()).contains(4);
assertThat(http.getAcceptQueueSize()).isEqualTo(1024);
assertThat(http.isReuseAddress()).isFalse();
assertThat(http.isUseServerHeader()).isTrue();
assertThat(http.isUseDateHeader()).isFalse();
assertThat(http.isUseForwardedHeaders()).isTrue();
HttpConfiguration httpConfiguration = http.buildHttpConfiguration();
assertThat(httpConfiguration.getCustomizers()).hasAtLeastOneElementOfType(ForwardedRequestCustomizer.class);
assertThat(http.getHttpCompliance()).isEqualTo(HttpCompliance.RFC2616);
assertThat(http.getRequestCookieCompliance()).isEqualTo(CookieCompliance.RFC2965);
assertThat(http.getResponseCookieCompliance()).isEqualTo(CookieCompliance.RFC6265);
}
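    /*
     * The "yaml/http-connector.yml" fixture exercised above is not part of this file. Judging
     * from the assertions, it presumably looks roughly like the sketch below (an illustrative
     * assumption, not the actual resource):
     *
     *   type: http
     *   port: 9090
     *   bindHost: 127.0.0.1
     *   inheritChannel: true
     *   idleTimeout: 10 seconds
     *   acceptorThreads: 1
     *   selectorThreads: 4
     *   acceptQueueSize: 1024
     *   useForwardedHeaders: true
     *   httpCompliance: RFC2616
     *   requestCookieCompliance: RFC2965
     *   ...
     */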
@Test
void testBuildConnector() throws Exception {
HttpConnectorFactory http = spy(new HttpConnectorFactory());
http.setBindHost("127.0.0.1");
http.setAcceptorThreads(Optional.of(1));
http.setSelectorThreads(Optional.of(2));
http.setAcceptQueueSize(1024);
http.setMinResponseDataPerSecond(DataSize.bytes(200));
http.setMinRequestDataPerSecond(DataSize.bytes(42));
http.setRequestCookieCompliance(CookieCompliance.RFC6265);
http.setResponseCookieCompliance(CookieCompliance.RFC6265);
MetricRegistry metrics = new MetricRegistry();
ThreadPool threadPool = new QueuedThreadPool();
Server server = null;
ServerConnector connector = null;
try {
server = new Server();
connector = (ServerConnector) http.build(server, metrics, "test-http-connector", threadPool);
assertThat(connector.getPort()).isEqualTo(8080);
assertThat(connector.getHost()).isEqualTo("127.0.0.1");
assertThat(connector.getAcceptQueueSize()).isEqualTo(1024);
assertThat(connector.getReuseAddress()).isTrue();
assertThat(connector.getIdleTimeout()).isEqualTo(30000);
assertThat(connector.getName()).isEqualTo("test-http-connector");
assertThat(connector.getServer()).isSameAs(server);
assertThat(connector.getScheduler()).isInstanceOf(ScheduledExecutorScheduler.class);
assertThat(connector.getExecutor()).isSameAs(threadPool);
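            // 64 B, 1 KiB and 64 KiB are the connector defaults for minBufferPoolSize,
            // bufferPoolIncrement and maxBufferPoolSize, as asserted in testParseMinimalConfiguration.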
verify(http).buildBufferPool(64, 1024, 64 * 1024);
assertThat(connector.getAcceptors()).isEqualTo(1);
assertThat(connector.getSelectorManager().getSelectorCount()).isEqualTo(2);
InstrumentedConnectionFactory connectionFactory =
(InstrumentedConnectionFactory) connector.getConnectionFactory("http/1.1");
assertThat(connectionFactory).isInstanceOf(InstrumentedConnectionFactory.class);
assertThat(connectionFactory)
.extracting("connectionFactory")
.asInstanceOf(InstanceOfAssertFactories.type(HttpConnectionFactory.class))
.satisfies(factory -> {
assertThat(factory.getInputBufferSize()).isEqualTo(8192);
assertThat(factory.getHttpCompliance()).isEqualByComparingTo(HttpCompliance.RFC7230);
})
.extracting(HttpConnectionFactory::getHttpConfiguration)
.satisfies(config -> {
assertThat(config.getHeaderCacheSize()).isEqualTo(512);
assertThat(config.getOutputBufferSize()).isEqualTo(32768);
assertThat(config.getRequestHeaderSize()).isEqualTo(8192);
assertThat(config.getResponseHeaderSize()).isEqualTo(8192);
assertThat(config.getSendDateHeader()).isTrue();
assertThat(config.getSendServerVersion()).isFalse();
assertThat(config.getCustomizers()).noneMatch(customizer -> customizer.getClass().equals(ForwardedRequestCustomizer.class));
assertThat(config.getMinRequestDataRate()).isEqualTo(42);
assertThat(config.getMinResponseDataRate()).isEqualTo(200);
assertThat(config.getRequestCookieCompliance()).isEqualTo(CookieCompliance.RFC6265);
assertThat(config.getResponseCookieCompliance()).isEqualTo(CookieCompliance.RFC6265);
});
} finally {
if (connector != null) {
connector.stop();
}
if (server != null) {
server.stop();
}
}
}
@Test
void testBuildConnectorWithProxyProtocol() throws Exception {
HttpConnectorFactory http = new HttpConnectorFactory();
http.setBindHost("127.0.0.1");
http.setUseProxyProtocol(true);
MetricRegistry metrics = new MetricRegistry();
ThreadPool threadPool = new QueuedThreadPool();
Server server = null;
ServerConnector connector = null;
try {
server = new Server();
connector = (ServerConnector) http.build(server, metrics, "test-http-connector-with-proxy-protocol", threadPool);
assertThat(connector.getConnectionFactories().toArray()[0]).isInstanceOf(ProxyConnectionFactory.class);
} finally {
if (connector != null) {
connector.stop();<|fim▁hole|> }
}
}
@Test
void testDefaultAcceptQueueSize() throws Exception {
HttpConnectorFactory http = new HttpConnectorFactory();
http.setBindHost("127.0.0.1");
http.setAcceptorThreads(Optional.of(1));
http.setSelectorThreads(Optional.of(2));
MetricRegistry metrics = new MetricRegistry();
ThreadPool threadPool = new QueuedThreadPool();
Server server = null;
ServerConnector connector = null;
try {
server = new Server();
connector = (ServerConnector) http.build(server, metrics, "test-http-connector", threadPool);
assertThat(connector.getAcceptQueueSize()).isEqualTo(NetUtil.getTcpBacklog());
} finally {
if (connector != null) {
connector.stop();
}
if (server != null) {
server.stop();
}
}
}
}<|fim▁end|>
|
}
if (server != null) {
server.stop();
|
<|file_name|>camelizeStyle.js<|end_file_name|><|fim▁begin|>import camelize from "./camelize";
const msPattern = /^-ms-/;
function camelizeStyle(string) {
return camelize(string.replace(msPattern, "ms-"));
}<|fim▁hole|><|fim▁end|>
|
export default camelizeStyle;
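// Illustrative usage (assuming camelize() does plain hyphen-to-camelCase conversion):
//   camelizeStyle("background-color") -> "backgroundColor"
//   camelizeStyle("-ms-transform")    -> "msTransform"   (vendor prefix kept lowercase)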
|
<|file_name|>keccak.py<|end_file_name|><|fim▁begin|># ===================================================================
#
# Copyright (c) 2015, Legrandin <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ===================================================================
"""Keccak family of cryptographic hash algorithms.
`Keccak`_ is the winning algorithm of the SHA-3 competition organized by NIST.
What eventually became SHA-3 is a variant incompatible with Keccak,
even though the security principles and margins remain the same.
If you are interested in writing SHA-3 compliant code, you must use
the modules ``SHA3_224``, ``SHA3_256``, ``SHA3_384`` or ``SHA3_512``.
This module implements the Keccak hash functions for the 64 bit word
length (b=1600) and the fixed digest sizes of 224, 256, 384 and 512 bits.
>>> from Cryptodome.Hash import keccak
>>>
>>> keccak_hash = keccak.new(digest_bits=512)
>>> keccak_hash.update(b'Some data')
>>> print(keccak_hash.hexdigest())
.. _Keccak: http://www.keccak.noekeon.org/Keccak-specifications.pdf
"""
from Cryptodome.Util.py3compat import bord
from Cryptodome.Util._raw_api import (load_pycryptodome_raw_lib,
VoidPointer, SmartPointer,
create_string_buffer,
get_raw_buffer, c_size_t,
expect_byte_string)
_raw_keccak_lib = load_pycryptodome_raw_lib("Cryptodome.Hash._keccak",
"""
int keccak_init(void **state,
size_t capacity_bytes,
uint8_t padding_byte);
int keccak_destroy(void *state);
int keccak_absorb(void *state,
const uint8_t *in,
size_t len);
int keccak_squeeze(const void *state,
uint8_t *out,
size_t len);
int keccak_digest(void *state, uint8_t *digest, size_t len);
""")
class Keccak_Hash(object):
"""Class that implements a Keccak hash
"""
def __init__(self, data, digest_bytes, update_after_digest):
#: The size of the resulting hash in bytes.
self.digest_size = digest_bytes
self._update_after_digest = update_after_digest
self._digest_done = False
state = VoidPointer()
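        # Keccak with capacity c = 2*n bytes for an n-byte digest (the classic Keccak security
        # parameter); the remaining 200 - c bytes of the 1600-bit state form the rate.
        # Padding byte 0x01 selects original Keccak padding (standardised SHA-3 uses 0x06).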
result = _raw_keccak_lib.keccak_init(state.address_of(),
c_size_t(self.digest_size * 2),
0x01)
<|fim▁hole|> if result:
raise ValueError("Error %d while instantiating keccak" % result)
self._state = SmartPointer(state.get(),
_raw_keccak_lib.keccak_destroy)
if data:
self.update(data)
def update(self, data):
"""Continue hashing of a message by consuming the next chunk of data.
Repeated calls are equivalent to a single call with the concatenation
of all the arguments. In other words:
>>> m.update(a); m.update(b)
is equivalent to:
>>> m.update(a+b)
:Parameters:
data : byte string
The next chunk of the message being hashed.
"""
if self._digest_done and not self._update_after_digest:
raise TypeError("You can only call 'digest' or 'hexdigest' on this object")
expect_byte_string(data)
result = _raw_keccak_lib.keccak_absorb(self._state.get(),
data,
c_size_t(len(data)))
if result:
raise ValueError("Error %d while updating keccak" % result)
return self
def digest(self):
"""Return the **binary** (non-printable) digest of the message that has been hashed so far.
You cannot update the hash anymore after the first call to ``digest``
(or ``hexdigest``).
:Return: A byte string of `digest_size` bytes.
It may contain non-ASCII characters, including null bytes.
"""
self._digest_done = True
bfr = create_string_buffer(self.digest_size)
result = _raw_keccak_lib.keccak_digest(self._state.get(),
bfr,
c_size_t(self.digest_size))
if result:
raise ValueError("Error %d while squeezing keccak" % result)
return get_raw_buffer(bfr)
def hexdigest(self):
"""Return the **printable** digest of the message that has been hashed so far.
This method does not change the state of the hash object.
:Return: A string of 2* `digest_size` characters. It contains only
hexadecimal ASCII digits.
"""
return "".join(["%02x" % bord(x) for x in self.digest()])
def new(self, **kwargs):
if "digest_bytes" not in kwargs and "digest_bits" not in kwargs:
kwargs["digest_bytes"] = self.digest_size
return new(**kwargs)
def new(**kwargs):
"""Return a fresh instance of the hash object.
:Keywords:
data : byte string
Optional. The very first chunk of the message to hash.
It is equivalent to an early call to ``update()``.
digest_bytes : integer
The size of the digest, in bytes (28, 32, 48, 64).
digest_bits : integer
The size of the digest, in bits (224, 256, 384, 512).
update_after_digest : boolean
Optional. By default, a hash object cannot be updated anymore after
the digest is computed. When this flag is ``True``, such check
is no longer enforced.
:Return: A `Keccak_Hash` object
"""
data = kwargs.pop("data", None)
update_after_digest = kwargs.pop("update_after_digest", False)
digest_bytes = kwargs.pop("digest_bytes", None)
digest_bits = kwargs.pop("digest_bits", None)
if None not in (digest_bytes, digest_bits):
raise TypeError("Only one digest parameter must be provided")
if (None, None) == (digest_bytes, digest_bits):
raise TypeError("Digest size (bits, bytes) not provided")
if digest_bytes is not None:
if digest_bytes not in (28, 32, 48, 64):
raise ValueError("'digest_bytes' must be: 28, 32, 48 or 64")
else:
if digest_bits not in (224, 256, 384, 512):
raise ValueError("'digest_bytes' must be: 224, 256, 384 or 512")
digest_bytes = digest_bits // 8
if kwargs:
raise TypeError("Unknown parameters: " + str(kwargs))
return Keccak_Hash(data, digest_bytes, update_after_digest)<|fim▁end|>
| |
<|file_name|>vbp-optim.py<|end_file_name|><|fim▁begin|>from vsvbp import container, solver
import argparse, sys, os, re
def parse(inputfile):
""" Parse a file using format from
Brandao et al. [Bin Packing and Related Problems: General Arc-flow Formulation with Graph Compression (2013)]
Format:
d (number of dimensions)
C_1 ... C_d capacities of the bins in each dimension
n number of different items
    w^1_1 ... w^d_1 d_1 (the d requirements of item 1, followed by d_1 = demand, i.e. the number of such items)
    ...
    w^1_n ... w^d_n d_n
Return: a list of items and a typical bin
"""
inp = inputfile
#inp = open(filename, 'r')
dim = int(inp.readline())
#if dim > 50: return False, False
cap = map(int, inp.readline().split())
assert dim == len(cap)
nitems = int(inp.readline())
items = []
i = 0
for line in inp:
req = map(int, line.split())
dem = req.pop()
assert len(req) == dim
items.extend([container.Item(req) for j in xrange(dem)])
i += 1
assert i == nitems
inp.close()
return items, container.Bin(cap)
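# A tiny instance in the format described above could look like this (hypothetical data):
#
#   2           <- number of dimensions (e.g. CPU, memory)
#   100 64      <- bin capacity in each dimension
#   2           <- number of distinct item types
#   27 3 5      <- an item requiring (27, 3), with demand 5
#   50 32 1     <- an item requiring (50, 32), with demand 1
#
# parse() expands the demands, returning 6 Item objects and a Bin([100, 64]).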
def natural_sort(l):
convert = lambda text: int(text) if text.isdigit() else text.lower()
alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ]
return sorted(l, key = alphanum_key)
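# e.g. natural_sort(["inst10", "inst2", "inst1"]) -> ["inst1", "inst2", "inst10"]:
# embedded numbers are compared numerically instead of lexicographically.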
def get_subdirectories(directory):
dirs = [os.path.join(directory,name) for name in os.listdir(directory)
if os.path.isdir(os.path.join(directory, name))]
return natural_sort(dirs)
def get_files(directory):
files = [os.path.join(directory,name) for name in os.listdir(directory)
if os.path.isfile(os.path.join(directory, name))]
files.sort()
return natural_sort(files)
def optim_dir(directory, level=0):
files = get_files(directory)
for f in files:
optimize(f, level)
def optim_rec(directory, level=0):
subdir = get_subdirectories(directory)
print " "*level+ "|"+"- "+directory.split('/').pop()
if not subdir:
return optim_dir(directory, level+1)
for d in subdir:
optim_rec(d, level+1)
def optimize(filename, level=0):
fl = open(filename)
items, tbin = parse(fl)
if not items:
fl.close()
return
opt = len(solver.optimize(items, tbin, optimize.dp, optimize.seed).bins)
template = "{0:50}{1:10}"
if level == 0:
st = filename.split('/').pop()
print template.format(st, str(opt))
else:
st = " "*level+"| "+filename.split('/').pop()
print template.format(st, str(opt))
fl.close()
sys.stdout.flush()
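# Typical invocations (assuming the script is executed directly, with the flags defined in run()):
#   python vbp-optim.py -f instances/example.vbp      # optimize a single instance file
#   python vbp-optim.py -d instances/ -r -u -s 42     # recurse into subdirectories, use the
#                                                     # dot-product heuristics, fix the seed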
def run():
parser = argparse.ArgumentParser(description="Run VSVBP heuristics on given instances")
parser.add_argument('-f', type=argparse.FileType('r'),
help="The path to a file containing the bin packing problem to optimize")
parser.add_argument('-d', help="A directory containing (only) files modeling\
bin packing problems to optimize. Optimize all files in the directory.")
parser.add_argument('-r', action='store_true', help="Recursive. If a directory is provided,\
optimize all files in all final subdirectories.")
parser.add_argument('-u', action='store_true', help="If activated, use dot product heuristics")
parser.add_argument('-s', type=int, help="Set seed to specified value")
args = parser.parse_args()
if not (args.f or args.d):
parser.error('No action requested, add -f or -d')
if args.f and args.d:
parser.error('Too many actions requested, add only -f or -d')
if args.r and not args.d:
sys.stderr.write("Warning recursive argument was specified but")
sys.stderr.write(" no directory was provided. Argument ignored.\n")
if args.d and not os.path.isdir(args.d):
parser.error('Invalid directory')
optimize.dp = args.u
optimize.seed = args.s
if args.f:
items, tbin = parse(args.f)
opt = len(solver.optimize(items, tbin, args.u, args.s).bins)
template = "{0:50}{1:10}"
st = args.f.name.split('/').pop()
print template.format(st, str(opt))
elif not args.r:
optim_dir(args.d)<|fim▁hole|> optim_rec(args.d)
if __name__ == "__main__":
run()<|fim▁end|>
|
else:
|