prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>RoleInput.tsx<|end_file_name|><|fim▁begin|>/**
* @copyright 2009-2020 Vanilla Forums Inc.<|fim▁hole|> */
import SelectOne, { ISelectOneProps } from "@library/forms/select/SelectOne";
import { t } from "@library/utility/appUtils";
import React from "react";
import { IComboBoxOption } from "@library/features/search/SearchBar";
interface IProps extends ISelectOneProps {
className?: string;
placeholder?: string;
}
export default function RoleInput(props: IProps) {
const options: IComboBoxOption[] = [];
return <SelectOne {...props} options={options} />;
}<|fim▁end|> | * @license GPL-2.0-only |
<|file_name|>grid.js<|end_file_name|><|fim▁begin|>function Grid(size) {
this.size = size;
this.startTiles = 2;
this.cells = [];
this.build();
this.playerTurn = true;
}
// pre-allocate these objects (for speed)
Grid.prototype.indexes = [];
for (var x=0; x<4; x++) {
Grid.prototype.indexes.push([]);
for (var y=0; y<4; y++) {<|fim▁hole|>}
// Build a grid of the specified size
Grid.prototype.build = function () {
for (var x = 0; x < this.size; x++) {
var row = this.cells[x] = [];
for (var y = 0; y < this.size; y++) {
row.push(null);
}
}
};
// Return a randomly chosen available cell (if any)
Grid.prototype.randomAvailableCell = function () {
var cells = this.availableCells();
if (cells.length) {
return cells[Math.floor(Math.random() * cells.length)];
}
};
Grid.prototype.availableCells = function () {
var cells = [];
var self = this;
this.eachCell(function (x, y, tile) {
if (!tile) {
//cells.push(self.indexes[x][y]);
cells.push( {x:x, y:y} );
}
});
return cells;
};
// Call callback for every cell
Grid.prototype.eachCell = function (callback) {
for (var x = 0; x < this.size; x++) {
for (var y = 0; y < this.size; y++) {
callback(x, y, this.cells[x][y]);
}
}
};
// Check if there are any cells available
Grid.prototype.cellsAvailable = function () {
return !!this.availableCells().length;
};
// Check if the specified cell is taken
Grid.prototype.cellAvailable = function (cell) {
return !this.cellOccupied(cell);
};
Grid.prototype.cellOccupied = function (cell) {
return !!this.cellContent(cell);
};
Grid.prototype.cellContent = function (cell) {
if (this.withinBounds(cell)) {
return this.cells[cell.x][cell.y];
} else {
return null;
}
};
// Inserts a tile at its position
Grid.prototype.insertTile = function (tile) {
this.cells[tile.x][tile.y] = tile;
};
Grid.prototype.removeTile = function (tile) {
this.cells[tile.x][tile.y] = null;
};
Grid.prototype.withinBounds = function (position) {
return position.x >= 0 && position.x < this.size &&
position.y >= 0 && position.y < this.size;
};
Grid.prototype.clone = function() {
var newGrid = new Grid(this.size);
newGrid.playerTurn = this.playerTurn;
for (var x = 0; x < this.size; x++) {
for (var y = 0; y < this.size; y++) {
if (this.cells[x][y]) {
newGrid.insertTile(this.cells[x][y].clone());
}
}
}
return newGrid;
};
// Set up the initial tiles to start the game with
Grid.prototype.addStartTiles = function () {
for (var i=0; i<this.startTiles; i++) {
this.addRandomTile();
}
};
// Adds a tile in a random position
Grid.prototype.addRandomTile = function () {
if (this.cellsAvailable()) {
var value = Math.random() < 0.9 ? 2 : 4;
//var value = Math.random() < 0.9 ? 256 : 512;
var tile = new Tile(this.randomAvailableCell(), value);
this.insertTile(tile);
}
};
// Save all tile positions and remove merger info
Grid.prototype.prepareTiles = function () {
this.eachCell(function (x, y, tile) {
if (tile) {
tile.mergedFrom = null;
tile.savePosition();
}
});
};
// Move a tile and its representation
Grid.prototype.moveTile = function (tile, cell) {
this.cells[tile.x][tile.y] = null;
this.cells[cell.x][cell.y] = tile;
tile.updatePosition(cell);
};
Grid.prototype.vectors = {
0: { x: 0, y: -1 }, // up
1: { x: 1, y: 0 }, // right
2: { x: 0, y: 1 }, // down
3: { x: -1, y: 0 } // left
}
// Get the vector representing the chosen direction
Grid.prototype.getVector = function (direction) {
// Vectors representing tile movement
return this.vectors[direction];
};
// Move tiles on the grid in the specified direction
// returns true if move was successful
Grid.prototype.move = function (direction) {
// 0: up, 1: right, 2:down, 3: left
var self = this;
var cell, tile;
var vector = this.getVector(direction);
var traversals = this.buildTraversals(vector);
var moved = false;
var score = 0;
var won = false;
// Save the current tile positions and remove merger information
this.prepareTiles();
// Traverse the grid in the right direction and move tiles
traversals.x.forEach(function (x) {
traversals.y.forEach(function (y) {
cell = self.indexes[x][y];
tile = self.cellContent(cell);
if (tile) {
//if (debug) {
//console.log('tile @', x, y);
//}
var positions = self.findFarthestPosition(cell, vector);
var next = self.cellContent(positions.next);
// Only one merger per row traversal?
if (next && next.value === tile.value && !next.mergedFrom) {
var merged = new Tile(positions.next, tile.value * 2);
merged.mergedFrom = [tile, next];
self.insertTile(merged);
self.removeTile(tile);
// Converge the two tiles' positions
tile.updatePosition(positions.next);
// Update the score
score += merged.value;
// The mighty 2048 tile
if (merged.value === 2048) {
won = true;
}
} else {
//if (debug) {
//console.log(cell);
//console.log(tile);
//}
self.moveTile(tile, positions.farthest);
}
if (!self.positionsEqual(cell, tile)) {
self.playerTurn = false;
//console.log('setting player turn to ', self.playerTurn);
moved = true; // The tile moved from its original cell!
}
}
});
});
//console.log('returning, playerturn is', self.playerTurn);
//if (!moved) {
//console.log('cell', cell);
//console.log('tile', tile);
//console.log('direction', direction);
//console.log(this.toString());
//}
return {moved: moved, score: score, won: won};
};
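// Usage sketch (assumes a compatible Tile implementation is loaded alongside this file):
//   var grid = new Grid(4);
//   grid.addStartTiles();
//   var result = grid.move(0); // 0: up
//   // result -> { moved: <boolean>, score: <points gained>, won: <boolean> }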
Grid.prototype.computerMove = function() {
this.addRandomTile();
this.playerTurn = true;
}
// Build a list of positions to traverse in the right order
Grid.prototype.buildTraversals = function (vector) {
var traversals = { x: [], y: [] };
for (var pos = 0; pos < this.size; pos++) {
traversals.x.push(pos);
traversals.y.push(pos);
}
// Always traverse from the farthest cell in the chosen direction
if (vector.x === 1) traversals.x = traversals.x.reverse();
if (vector.y === 1) traversals.y = traversals.y.reverse();
return traversals;
};
Grid.prototype.findFarthestPosition = function (cell, vector) {
var previous;
// Progress towards the vector direction until an obstacle is found
do {
previous = cell;
cell = { x: previous.x + vector.x, y: previous.y + vector.y };
} while (this.withinBounds(cell) &&
this.cellAvailable(cell));
return {
farthest: previous,
next: cell // Used to check if a merge is required
};
};
Grid.prototype.movesAvailable = function () {
return this.cellsAvailable() || this.tileMatchesAvailable();
};
// Check for available matches between tiles (more expensive check)
// returns the number of matches
Grid.prototype.tileMatchesAvailable = function () {
var self = this;
//var matches = 0;
var tile;
for (var x = 0; x < this.size; x++) {
for (var y = 0; y < this.size; y++) {
tile = this.cellContent({ x: x, y: y });
if (tile) {
for (var direction = 0; direction < 4; direction++) {
var vector = self.getVector(direction);
var cell = { x: x + vector.x, y: y + vector.y };
var other = self.cellContent(cell);
if (other && other.value === tile.value) {
return true; //matches++; // These two tiles can be merged
}
}
}
}
}
//console.log(matches);
return false; //matches;
};
Grid.prototype.positionsEqual = function (first, second) {
return first.x === second.x && first.y === second.y;
};
Grid.prototype.toString = function() {
var string = '';
for (var i=0; i<4; i++) {
for (var j=0; j<4; j++) {
if (this.cells[j][i]) {
string += this.cells[j][i].value + ' ';
} else {
string += '_ ';
}
}
string += '\n';
}
return string;
}
// counts the number of isolated groups.
Grid.prototype.islands = function() {
var self = this;
var mark = function(x, y, value) {
if (x >= 0 && x <= 3 && y >= 0 && y <= 3 &&
self.cells[x][y] &&
self.cells[x][y].value == value &&
!self.cells[x][y].marked ) {
self.cells[x][y].marked = true;
for (var direction = 0; direction < 4; direction++) {
var vector = self.getVector(direction);
mark(x + vector.x, y + vector.y, value);
}
}
}
var islands = 0;
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if (this.cells[x][y]) {
this.cells[x][y].marked = false
}
}
}
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if (this.cells[x][y] &&
!this.cells[x][y].marked) {
islands++;
mark(x, y , this.cells[x][y].value);
}
}
}
return islands;
}
// Measures how smooth the grid is (as if the values of the pieces
// were interpreted as elevations). Sums the pairwise differences
// between neighboring tiles (in log space, so it represents the
// number of merges that need to happen before they can merge).
// Note that the pieces can be distant.
Grid.prototype.smoothness = function() {
var smoothness = 0;
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if ( this.cellOccupied( this.indexes[x][y] )) {
var value = Math.log(this.cellContent( this.indexes[x][y] ).value) / Math.log(2);
for (var direction=1; direction<=2; direction++) {
var vector = this.getVector(direction);
var targetCell = this.findFarthestPosition(this.indexes[x][y], vector).next;
if (this.cellOccupied(targetCell)) {
var target = this.cellContent(targetCell);
var targetValue = Math.log(target.value) / Math.log(2);
smoothness -= Math.abs(value - targetValue);
}
}
}
}
}
return smoothness;
}
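// Worked example: two tiles of value 2 and 8 that face each other along a
// traversal contribute -|log2(2) - log2(8)| = -|1 - 3| = -2 to the score,
// i.e. the 2 is two doublings away from being able to merge with the 8.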
Grid.prototype.monotonicity = function() {
var self = this;
var marked = [];
var queued = [];
var highestValue = 0;
var highestCell = {x:0, y:0};
for (var x=0; x<4; x++) {
marked.push([]);
queued.push([]);
for (var y=0; y<4; y++) {
marked[x].push(false);
queued[x].push(false);
if (this.cells[x][y] &&
this.cells[x][y].value > highestValue) {
highestValue = this.cells[x][y].value;
highestCell.x = x;
highestCell.y = y;
}
}
}
var increases = 0;
var cellQueue = [highestCell];
queued[highestCell.x][highestCell.y] = true;
var markList = [highestCell];
markAfter = 1; // only mark after all queued moves are done, as if searching in parallel
var markAndScore = function(cell) {
markList.push(cell);
var value;
if (self.cellOccupied(cell)) {
value = Math.log(self.cellContent(cell).value) / Math.log(2);
} else {
value = 0;
}
for (var direction = 0; direction < 4; direction++) {
var vector = self.getVector(direction);
var target = { x: cell.x + vector.x, y: cell.y + vector.y };
if (self.withinBounds(target) && !marked[target.x][target.y]) {
if ( self.cellOccupied(target) ) {
var targetValue = Math.log(self.cellContent(target).value) / Math.log(2);
if ( targetValue > value ) {
//console.log(cell, value, target, targetValue);
increases += targetValue - value;
}
}
if (!queued[target.x][target.y]) {
cellQueue.push(target);
queued[target.x][target.y] = true;
}
}
}
if (markAfter == 0) {
while (markList.length > 0) {
var cel = markList.pop();
marked[cel.x][cel.y] = true;
}
markAfter = cellQueue.length;
}
}
while (cellQueue.length > 0) {
markAfter--;
markAndScore(cellQueue.shift())
}
return -increases;
}
// measures how monotonic the grid is. This means the values of the tiles are strictly increasing
// or decreasing in both the left/right and up/down directions
Grid.prototype.monotonicity2 = function() {
// scores for all four directions
var totals = [0, 0, 0, 0];
// up/down direction
for (var x=0; x<4; x++) {
var current = 0;
var next = current+1;
while ( next<4 ) {
while ( next<4 && !this.cellOccupied( this.indexes[x][next] )) {
next++;
}
if (next>=4) { next--; }
var currentValue = this.cellOccupied({x:x, y:current}) ?
Math.log(this.cellContent( this.indexes[x][current] ).value) / Math.log(2) :
0;
var nextValue = this.cellOccupied({x:x, y:next}) ?
Math.log(this.cellContent( this.indexes[x][next] ).value) / Math.log(2) :
0;
if (currentValue > nextValue) {
totals[0] += nextValue - currentValue;
} else if (nextValue > currentValue) {
totals[1] += currentValue - nextValue;
}
current = next;
next++;
}
}
// left/right direction
for (var y=0; y<4; y++) {
var current = 0;
var next = current+1;
while ( next<4 ) {
while ( next<4 && !this.cellOccupied( this.indexes[next][y] )) {
next++;
}
if (next>=4) { next--; }
var currentValue = this.cellOccupied({x:current, y:y}) ?
Math.log(this.cellContent( this.indexes[current][y] ).value) / Math.log(2) :
0;
var nextValue = this.cellOccupied({x:next, y:y}) ?
Math.log(this.cellContent( this.indexes[next][y] ).value) / Math.log(2) :
0;
if (currentValue > nextValue) {
totals[2] += nextValue - currentValue;
} else if (nextValue > currentValue) {
totals[3] += currentValue - nextValue;
}
current = next;
next++;
}
}
return Math.max(totals[0], totals[1]) + Math.max(totals[2], totals[3]);
}
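// Example: a strictly decreasing row such as [8, 4, 2, _] accumulates its whole
// penalty in one direction's total while the opposite total stays 0, so the
// Math.max over the pair yields 0; a perfectly monotonic grid scores 0, the
// best possible value since the totals are never positive.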
Grid.prototype.maxValue = function() {
var max = 0;
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if (this.cellOccupied(this.indexes[x][y])) {
var value = this.cellContent(this.indexes[x][y]).value;
if (value > max) {
max = value;
}
}
}
}
return Math.log(max) / Math.log(2);
}
// WIP. trying to favor top-heavy distributions (force consolidation of higher value tiles)
/*
Grid.prototype.valueSum = function() {
var valueCount = [];
for (var i=0; i<11; i++) {
valueCount.push(0);
}
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if (this.cellOccupied(this.indexes[x][y])) {
valueCount[Math.log(this.cellContent(this.indexes[x][y]).value) / Math.log(2)]++;
}
}
}
var sum = 0;
for (var i=1; i<11; i++) {
sum += valueCount[i] * Math.pow(2, i) + i;
}
return sum;
}
*/
// check for win
Grid.prototype.isWin = function() {
var self = this;
for (var x=0; x<4; x++) {
for (var y=0; y<4; y++) {
if (self.cellOccupied(this.indexes[x][y])) {
if (self.cellContent(this.indexes[x][y]).value == 2048) {
return true;
}
}
}
}
return false;
}
//Grid.prototype.zobristTable = {}
//for
//Grid.prototype.hash = function() {
//}<|fim▁end|> | Grid.prototype.indexes[x].push( {x:x, y:y} );
} |
<|file_name|>notify.js<|end_file_name|><|fim▁begin|>Notify = function(text, callback, close_callback, style) {
var time = 10000; // auto-dismiss delay in milliseconds
var $container = $('#notifications');
var icon = '<i class="fa fa-info-circle "></i>';
if (typeof style === 'undefined') style = 'warning';
var html = $('<div class="alert alert-' + style + ' hide">' + icon + " " + text + '</div>');
$('<a>',{
text: '×',
class: 'close',
style: 'padding-left: 10px;',
href: '#',
click: function(e){
e.preventDefault()
close_callback && close_callback()
remove_notice()
}
}).prependTo(html)
$container.prepend(html)
html.removeClass('hide').hide().fadeIn('slow')
function remove_notice() {
clearInterval(timer); // stop rescheduling once the notice is gone
html.stop().fadeOut('slow', function () { $(this).remove(); }); // remove only after the fade completes
}
var timer = setInterval(remove_notice, time);
$(html).hover(function(){
clearInterval(timer);
}, function(){
timer = setInterval(remove_notice, time);
});
html.on('click', function () {
clearInterval(timer)<|fim▁hole|>
}<|fim▁end|> | callback && callback()
remove_notice()
}); |
<|file_name|>StereoscopicsManager.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2013 Team XBMC
* http://xbmc.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
*/
/*!
* @file StereoscopicsManager.cpp
* @brief This class acts as container for stereoscopic related functions
*/
#include <stdlib.h>
#include "StereoscopicsManager.h"
#include "Application.h"
#include "ServiceBroker.h"
#include "messaging/ApplicationMessenger.h"
#include "dialogs/GUIDialogKaiToast.h"
#include "dialogs/GUIDialogSelect.h"
#include "GUIInfoManager.h"
#include "GUIUserMessages.h"
#include "guilib/LocalizeStrings.h"
#include "input/Key.h"
#include "guilib/GUIWindowManager.h"
#include "settings/AdvancedSettings.h"
#include "settings/lib/Setting.h"
#include "settings/Settings.h"
#include "rendering/RenderSystem.h"
#include "utils/log.h"
#include "utils/RegExp.h"
#include "utils/StringUtils.h"
#include "utils/Variant.h"
#include "windowing/WindowingFactory.h"
#include "guiinfo/GUIInfoLabels.h"
using namespace KODI::MESSAGING;
struct StereoModeMap
{
const char* name;
RENDER_STEREO_MODE mode;
};
static const struct StereoModeMap VideoModeToGuiModeMap[] =
{
{ "mono", RENDER_STEREO_MODE_OFF },
{ "left_right", RENDER_STEREO_MODE_SPLIT_VERTICAL },
{ "right_left", RENDER_STEREO_MODE_SPLIT_VERTICAL },
{ "top_bottom", RENDER_STEREO_MODE_SPLIT_HORIZONTAL },
{ "bottom_top", RENDER_STEREO_MODE_SPLIT_HORIZONTAL },
{ "checkerboard_rl", RENDER_STEREO_MODE_CHECKERBOARD },
{ "checkerboard_lr", RENDER_STEREO_MODE_CHECKERBOARD },
{ "row_interleaved_rl", RENDER_STEREO_MODE_INTERLACED },
{ "row_interleaved_lr", RENDER_STEREO_MODE_INTERLACED },
{ "col_interleaved_rl", RENDER_STEREO_MODE_OFF }, // unsupported
{ "col_interleaved_lr", RENDER_STEREO_MODE_OFF }, // unsupported
{ "anaglyph_cyan_red", RENDER_STEREO_MODE_ANAGLYPH_RED_CYAN },
{ "anaglyph_green_magenta", RENDER_STEREO_MODE_ANAGLYPH_GREEN_MAGENTA },
{ "anaglyph_yellow_blue", RENDER_STEREO_MODE_ANAGLYPH_YELLOW_BLUE },
#ifndef TARGET_RASPBERRY_PI
{ "block_lr", RENDER_STEREO_MODE_HARDWAREBASED },
{ "block_rl", RENDER_STEREO_MODE_HARDWAREBASED },
#else
{ "block_lr", RENDER_STEREO_MODE_SPLIT_HORIZONTAL }, // fallback
{ "block_rl", RENDER_STEREO_MODE_SPLIT_HORIZONTAL }, // fallback
#endif
{}
};
static const struct StereoModeMap StringToGuiModeMap[] =
{
{ "off", RENDER_STEREO_MODE_OFF },
{ "split_vertical", RENDER_STEREO_MODE_SPLIT_VERTICAL },
{ "side_by_side", RENDER_STEREO_MODE_SPLIT_VERTICAL }, // alias
{ "sbs", RENDER_STEREO_MODE_SPLIT_VERTICAL }, // alias
{ "split_horizontal", RENDER_STEREO_MODE_SPLIT_HORIZONTAL },
{ "over_under", RENDER_STEREO_MODE_SPLIT_HORIZONTAL }, // alias
{ "tab", RENDER_STEREO_MODE_SPLIT_HORIZONTAL }, // alias
{ "row_interleaved", RENDER_STEREO_MODE_INTERLACED },
{ "interlaced", RENDER_STEREO_MODE_INTERLACED }, // alias
{ "checkerboard", RENDER_STEREO_MODE_CHECKERBOARD },
{ "anaglyph_cyan_red", RENDER_STEREO_MODE_ANAGLYPH_RED_CYAN },
{ "anaglyph_green_magenta", RENDER_STEREO_MODE_ANAGLYPH_GREEN_MAGENTA },
{ "anaglyph_yellow_blue", RENDER_STEREO_MODE_ANAGLYPH_YELLOW_BLUE },
{ "hardware_based", RENDER_STEREO_MODE_HARDWAREBASED },
{ "monoscopic", RENDER_STEREO_MODE_MONO },
{}
};
CStereoscopicsManager::CStereoscopicsManager(void)
{
m_stereoModeSetByUser = RENDER_STEREO_MODE_UNDEFINED;
m_lastStereoModeSetByUser = RENDER_STEREO_MODE_UNDEFINED;
}
CStereoscopicsManager::~CStereoscopicsManager(void) = default;
CStereoscopicsManager& CStereoscopicsManager::GetInstance()
{
static CStereoscopicsManager sStereoscopicsManager;
return sStereoscopicsManager;
}
void CStereoscopicsManager::Initialize(void)
{
// turn off stereo mode on XBMC startup
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, RENDER_STEREO_MODE_OFF);
SetStereoMode(RENDER_STEREO_MODE_OFF);
}
RENDER_STEREO_MODE CStereoscopicsManager::GetStereoMode(void)
{
return (RENDER_STEREO_MODE) CServiceBroker::GetSettings().GetInt(CSettings::SETTING_VIDEOSCREEN_STEREOSCOPICMODE);
}
void CStereoscopicsManager::SetStereoModeByUser(const RENDER_STEREO_MODE &mode)
{
// only update last user mode if desired mode is different from current
if (mode != m_stereoModeSetByUser)
m_lastStereoModeSetByUser = m_stereoModeSetByUser;
m_stereoModeSetByUser = mode;
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, mode);
SetStereoMode(mode);
}
void CStereoscopicsManager::SetStereoMode(const RENDER_STEREO_MODE &mode)
{
RENDER_STEREO_MODE currentMode = GetStereoMode();
RENDER_STEREO_MODE applyMode = mode;
// resolve automatic mode before applying
if (mode == RENDER_STEREO_MODE_AUTO)
applyMode = GetStereoModeOfPlayingVideo();
if (applyMode != currentMode && applyMode >= RENDER_STEREO_MODE_OFF)
{
if (!g_Windowing.SupportsStereo(applyMode))
return;
CServiceBroker::GetSettings().SetInt(CSettings::SETTING_VIDEOSCREEN_STEREOSCOPICMODE, applyMode);
}
}
RENDER_STEREO_MODE CStereoscopicsManager::GetNextSupportedStereoMode(const RENDER_STEREO_MODE &currentMode, int step)
{
RENDER_STEREO_MODE mode = currentMode;
do {
mode = (RENDER_STEREO_MODE) ((mode + step) % RENDER_STEREO_MODE_COUNT);
if(g_Windowing.SupportsStereo(mode))
break;
} while (mode != currentMode);
return mode;
}
std::string CStereoscopicsManager::DetectStereoModeByString(const std::string &needle)
{
std::string stereoMode = "mono";
std::string searchString(needle);
CRegExp re(true);
if (!re.RegComp(g_advancedSettings.m_stereoscopicregex_3d.c_str()))
{
CLog::Log(LOGERROR, "%s: Invalid RegExp for matching 3d content:'%s'", __FUNCTION__, g_advancedSettings.m_stereoscopicregex_3d.c_str());
return stereoMode;
}
if (re.RegFind(searchString) == -1)
return stereoMode; // no match found for 3d content, assume mono mode
if (!re.RegComp(g_advancedSettings.m_stereoscopicregex_sbs.c_str()))
{
CLog::Log(LOGERROR, "%s: Invalid RegExp for matching 3d SBS content:'%s'", __FUNCTION__, g_advancedSettings.m_stereoscopicregex_sbs.c_str());
return stereoMode;
}
if (re.RegFind(searchString) > -1)
{
stereoMode = "left_right";
return stereoMode;
}
if (!re.RegComp(g_advancedSettings.m_stereoscopicregex_tab.c_str()))
{
CLog::Log(LOGERROR, "%s: Invalid RegExp for matching 3d TAB content:'%s'", __FUNCTION__, g_advancedSettings.m_stereoscopicregex_tab.c_str());
return stereoMode;
}
if (re.RegFind(searchString) > -1)
stereoMode = "top_bottom";
if (!re.RegComp(g_advancedSettings.m_stereoscopicregex_mvc.c_str()))
{
CLog::Log(LOGERROR, "%s: Invalid RegExp for matching 3d MVC content:'%s'", __FUNCTION__, g_advancedSettings.m_stereoscopicregex_mvc.c_str());
return stereoMode;
}
if (re.RegFind(searchString) > -1)
stereoMode = "left_right";
return stereoMode;
}
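// Example (sketch): with typical advancedsettings regexes, a filename such as
// "Movie.3D.SBS.1080p.mkv" would be detected as "left_right", while a name with
// no 3D marker falls through to "mono". (Assumption: the configured patterns
// match ".3D." and "SBS"; the actual regexes are user-configurable.)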
RENDER_STEREO_MODE CStereoscopicsManager::GetStereoModeByUserChoice(const std::string &heading)
{
RENDER_STEREO_MODE mode = GetStereoMode();
// if no stereo mode is set already, suggest mode of current video by preselecting it
if (mode == RENDER_STEREO_MODE_OFF)
mode = GetStereoModeOfPlayingVideo();
CGUIDialogSelect* pDlgSelect = g_windowManager.GetWindow<CGUIDialogSelect>(WINDOW_DIALOG_SELECT);
pDlgSelect->Reset();
if (heading.empty())
pDlgSelect->SetHeading(CVariant{g_localizeStrings.Get(36528)});
else
pDlgSelect->SetHeading(CVariant{heading});
// prepare selectable stereo modes
std::vector<RENDER_STEREO_MODE> selectableModes;
for (int i = RENDER_STEREO_MODE_OFF; i < RENDER_STEREO_MODE_COUNT; i++)
{
RENDER_STEREO_MODE selectableMode = (RENDER_STEREO_MODE) i;
if (g_Windowing.SupportsStereo(selectableMode))
{
selectableModes.push_back(selectableMode);
std::string label = GetLabelForStereoMode((RENDER_STEREO_MODE) i);
pDlgSelect->Add( label );
if (mode == selectableMode)
pDlgSelect->SetSelected( label );
}
// inject AUTO pseudo mode after OFF
if (i == RENDER_STEREO_MODE_OFF)
{
selectableModes.push_back(RENDER_STEREO_MODE_AUTO);
pDlgSelect->Add(GetLabelForStereoMode(RENDER_STEREO_MODE_AUTO));
}
}
pDlgSelect->Open();
int iItem = pDlgSelect->GetSelectedItem();
if (iItem > -1 && pDlgSelect->IsConfirmed())
mode = (RENDER_STEREO_MODE) selectableModes[iItem];
else
mode = GetStereoMode();
return mode;
}
RENDER_STEREO_MODE CStereoscopicsManager::GetStereoModeOfPlayingVideo(void)
{
RENDER_STEREO_MODE mode = RENDER_STEREO_MODE_OFF;
std::string playerMode = GetVideoStereoMode();
if (!playerMode.empty())
{
int convertedMode = ConvertVideoToGuiStereoMode(playerMode);
if (convertedMode > -1)
mode = (RENDER_STEREO_MODE) convertedMode;
}
CLog::Log(LOGDEBUG, "StereoscopicsManager: autodetected stereo mode for movie mode %s is: %s", playerMode.c_str(), ConvertGuiStereoModeToString(mode));
return mode;
}
const std::string &CStereoscopicsManager::GetLabelForStereoMode(const RENDER_STEREO_MODE &mode) const
{
int msgId;
switch(mode) {
case RENDER_STEREO_MODE_AUTO:
msgId = 36532;
break;
case RENDER_STEREO_MODE_ANAGLYPH_YELLOW_BLUE:
msgId = 36510;
break;
case RENDER_STEREO_MODE_INTERLACED:
msgId = 36507;
break;
case RENDER_STEREO_MODE_CHECKERBOARD:
msgId = 36511;
break;
case RENDER_STEREO_MODE_HARDWAREBASED:
msgId = 36508;
break;
case RENDER_STEREO_MODE_MONO:
msgId = 36509;
break;
default:
msgId = 36502 + mode;
}
return g_localizeStrings.Get(msgId);
}
RENDER_STEREO_MODE CStereoscopicsManager::GetPreferredPlaybackMode(void)
{
return (RENDER_STEREO_MODE) CServiceBroker::GetSettings().GetInt(CSettings::SETTING_VIDEOSCREEN_PREFEREDSTEREOSCOPICMODE);
}
int CStereoscopicsManager::ConvertVideoToGuiStereoMode(const std::string &mode)
{
size_t i = 0;
while (VideoModeToGuiModeMap[i].name)
{
if (mode == VideoModeToGuiModeMap[i].name && g_Windowing.SupportsStereo(VideoModeToGuiModeMap[i].mode))
return VideoModeToGuiModeMap[i].mode;
i++;
}
return -1;
}
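// Example (sketch): ConvertVideoToGuiStereoMode("left_right") returns
// RENDER_STEREO_MODE_SPLIT_VERTICAL provided the windowing layer reports
// support for that mode; unknown or unsupported modes return -1.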
int CStereoscopicsManager::ConvertStringToGuiStereoMode(const std::string &mode)
{
size_t i = 0;
while (StringToGuiModeMap[i].name)
{
if (mode == StringToGuiModeMap[i].name)
return StringToGuiModeMap[i].mode;
i++;
}
return ConvertVideoToGuiStereoMode(mode);
}
const char* CStereoscopicsManager::ConvertGuiStereoModeToString(const RENDER_STEREO_MODE &mode)
{
size_t i = 0;
while (StringToGuiModeMap[i].name)
{
if (StringToGuiModeMap[i].mode == mode)
return StringToGuiModeMap[i].name;
i++;
}
return "";
}
std::string CStereoscopicsManager::NormalizeStereoMode(const std::string &mode)
{
if (!mode.empty() && mode != "mono")
{
int guiMode = ConvertStringToGuiStereoMode(mode);
if (guiMode > -1)
return ConvertGuiStereoModeToString((RENDER_STEREO_MODE) guiMode);
else
return mode;
}
return "mono";
}
CAction CStereoscopicsManager::ConvertActionCommandToAction(const std::string &command, const std::string &parameter)
{
std::string cmd = command;
std::string para = parameter;
StringUtils::ToLower(cmd);
StringUtils::ToLower(para);
if (cmd == "setstereomode")
{
int actionId = -1;
if (para == "next")
actionId = ACTION_STEREOMODE_NEXT;
else if (para == "previous")
actionId = ACTION_STEREOMODE_PREVIOUS;
else if (para == "toggle")
actionId = ACTION_STEREOMODE_TOGGLE;
else if (para == "select")
actionId = ACTION_STEREOMODE_SELECT;
else if (para == "tomono")
actionId = ACTION_STEREOMODE_TOMONO;
// already have a valid actionID return it
if (actionId > -1)
return CAction(actionId);
// still no valid action ID, check if parameter is a supported stereomode
if (ConvertStringToGuiStereoMode(para) > -1)
return CAction(ACTION_STEREOMODE_SET, para);
}
return CAction(ACTION_NONE);
}
void CStereoscopicsManager::OnSettingChanged(std::shared_ptr<const CSetting> setting)
{
if (setting == NULL)
return;
const std::string &settingId = setting->GetId();
if (settingId == CSettings::SETTING_VIDEOSCREEN_STEREOSCOPICMODE)
{
RENDER_STEREO_MODE mode = GetStereoMode();
CLog::Log(LOGDEBUG, "StereoscopicsManager: stereo mode setting changed to %s", ConvertGuiStereoModeToString(mode));
ApplyStereoMode(mode);
}
}
bool CStereoscopicsManager::OnMessage(CGUIMessage &message)
{
switch (message.GetMessage())
{
case GUI_MSG_PLAYBACK_STARTED:
OnPlaybackStarted();
break;
case GUI_MSG_PLAYBACK_STOPPED:
case GUI_MSG_PLAYLISTPLAYER_STOPPED:
OnPlaybackStopped();
break;
}
return false;
}
bool CStereoscopicsManager::OnAction(const CAction &action)
{
RENDER_STEREO_MODE mode = GetStereoMode();
if (action.GetID() == ACTION_STEREOMODE_NEXT)
{
SetStereoModeByUser(GetNextSupportedStereoMode(mode));
return true;
}
else if (action.GetID() == ACTION_STEREOMODE_PREVIOUS)
{
SetStereoModeByUser(GetNextSupportedStereoMode(mode, RENDER_STEREO_MODE_COUNT - 1));
return true;
}
else if (action.GetID() == ACTION_STEREOMODE_TOGGLE)
{
if (mode == RENDER_STEREO_MODE_OFF)
{
RENDER_STEREO_MODE targetMode = GetPreferredPlaybackMode();
// if user selected a specific mode before, make sure to
// switch back into that mode on toggle.
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_UNDEFINED)
{
// if user mode is set to OFF, he manually turned it off before. In this case use the last user applied mode
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_OFF)
targetMode = m_stereoModeSetByUser;
else if (m_lastStereoModeSetByUser != RENDER_STEREO_MODE_UNDEFINED && m_lastStereoModeSetByUser != RENDER_STEREO_MODE_OFF)
targetMode = m_lastStereoModeSetByUser;
}
SetStereoModeByUser(targetMode);
}
else
{
SetStereoModeByUser(RENDER_STEREO_MODE_OFF);
}
return true;
}
else if (action.GetID() == ACTION_STEREOMODE_SELECT)
{
SetStereoModeByUser(GetStereoModeByUserChoice());
return true;
}
else if (action.GetID() == ACTION_STEREOMODE_TOMONO)
{
if (mode == RENDER_STEREO_MODE_MONO)
{
RENDER_STEREO_MODE targetMode = GetPreferredPlaybackMode();
// if we have an old userdefined stereomode, use that one as toggle target
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_UNDEFINED)
{
// if user mode is set to OFF, he manually turned it off before. In this case use the last user applied mode
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_OFF && m_stereoModeSetByUser != mode)
targetMode = m_stereoModeSetByUser;
else if (m_lastStereoModeSetByUser != RENDER_STEREO_MODE_UNDEFINED && m_lastStereoModeSetByUser != RENDER_STEREO_MODE_OFF && m_lastStereoModeSetByUser != mode)
targetMode = m_lastStereoModeSetByUser;
}
SetStereoModeByUser(targetMode);
}
else
{
SetStereoModeByUser(RENDER_STEREO_MODE_MONO);
}
return true;
}
else if (action.GetID() == ACTION_STEREOMODE_SET)
{
int stereoMode = ConvertStringToGuiStereoMode(action.GetName());
if (stereoMode > -1)
SetStereoModeByUser( (RENDER_STEREO_MODE) stereoMode );
return true;
}
return false;
}
void CStereoscopicsManager::ApplyStereoMode(const RENDER_STEREO_MODE &mode, bool notify)
{
RENDER_STEREO_MODE currentMode = g_graphicsContext.GetStereoMode();
CLog::Log(LOGDEBUG, "StereoscopicsManager::ApplyStereoMode: trying to apply stereo mode. Current: %s | Target: %s", ConvertGuiStereoModeToString(currentMode), ConvertGuiStereoModeToString(mode));
if (currentMode != mode)
{
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, mode);
g_graphicsContext.SetNextStereoMode(mode);
CLog::Log(LOGDEBUG, "StereoscopicsManager: stereo mode changed to %s", ConvertGuiStereoModeToString(mode));
if (notify)
CGUIDialogKaiToast::QueueNotification(CGUIDialogKaiToast::Info, g_localizeStrings.Get(36501), GetLabelForStereoMode(mode));
}
}
std::string CStereoscopicsManager::GetVideoStereoMode()
{
std::string playerMode;
if (g_application.m_pPlayer->IsPlaying())
{
SPlayerVideoStreamInfo videoInfo;
g_application.m_pPlayer->GetVideoStreamInfo(CURRENT_STREAM, videoInfo);
playerMode = videoInfo.stereoMode;
}
return playerMode;
}
bool CStereoscopicsManager::IsVideoStereoscopic()
{
std::string mode = GetVideoStereoMode();
return !mode.empty() && mode != "mono";
}
void CStereoscopicsManager::OnPlaybackStarted(void)
{
STEREOSCOPIC_PLAYBACK_MODE playbackMode = (STEREOSCOPIC_PLAYBACK_MODE) CServiceBroker::GetSettings().GetInt(CSettings::SETTING_VIDEOPLAYER_STEREOSCOPICPLAYBACKMODE);
RENDER_STEREO_MODE mode = GetStereoMode();
// early return if playback mode should be ignored and we're in no stereoscopic mode right now
if (playbackMode == STEREOSCOPIC_PLAYBACK_MODE_IGNORE && mode == RENDER_STEREO_MODE_OFF)
return;
if (!CStereoscopicsManager::IsVideoStereoscopic())
{
// exit stereo mode if started item is not stereoscopic
// and if user prefers to stop 3D playback when movie is finished
if (mode != RENDER_STEREO_MODE_OFF && CServiceBroker::GetSettings().GetBool(CSettings::SETTING_VIDEOPLAYER_QUITSTEREOMODEONSTOP))
{
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, RENDER_STEREO_MODE_OFF);
SetStereoMode(RENDER_STEREO_MODE_OFF);
}
return;
}
// if we're not in stereomode yet, restore previously selected stereo mode in case it was user selected
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_UNDEFINED)
{
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, m_stereoModeSetByUser);
SetStereoMode(m_stereoModeSetByUser);
return;
}
RENDER_STEREO_MODE preferred = GetPreferredPlaybackMode();
RENDER_STEREO_MODE playing = GetStereoModeOfPlayingVideo();
if (mode != RENDER_STEREO_MODE_OFF)
{
// don't change mode if user selected to not exit stereomode on playback stop
// users selecting this option usually have to manually switch their TV into 3D mode
// and would be annoyed by having to switch TV modes when the next movie comes up
// @todo probably add a new setting for just this behavior
if (CServiceBroker::GetSettings().GetBool(CSettings::SETTING_VIDEOPLAYER_QUITSTEREOMODEONSTOP) == false)
return;
// only change to new stereo mode if not yet in preferred stereo mode
if (mode == preferred || (preferred == RENDER_STEREO_MODE_AUTO && mode == playing))
return;<|fim▁hole|> case STEREOSCOPIC_PLAYBACK_MODE_ASK: // Ask
{
CApplicationMessenger::GetInstance().SendMsg(TMSG_MEDIA_PAUSE);
CGUIDialogSelect* pDlgSelect = g_windowManager.GetWindow<CGUIDialogSelect>(WINDOW_DIALOG_SELECT);
pDlgSelect->Reset();
pDlgSelect->SetHeading(CVariant{g_localizeStrings.Get(36527)});
int idx_playing = -1;
// add choices
int idx_preferred = pDlgSelect->Add(g_localizeStrings.Get(36524) // preferred
+ " ("
+ GetLabelForStereoMode(preferred)
+ ")");
int idx_mono = pDlgSelect->Add(GetLabelForStereoMode(RENDER_STEREO_MODE_MONO)); // mono / 2d
if (playing != RENDER_STEREO_MODE_OFF && playing != preferred && preferred != RENDER_STEREO_MODE_AUTO && g_Windowing.SupportsStereo(playing)) // same as movie
idx_playing = pDlgSelect->Add(g_localizeStrings.Get(36532)
+ " ("
+ GetLabelForStereoMode(playing)
+ ")");
int idx_select = pDlgSelect->Add( g_localizeStrings.Get(36531) ); // other / select
pDlgSelect->Open();
if(pDlgSelect->IsConfirmed())
{
int iItem = pDlgSelect->GetSelectedItem();
if (iItem == idx_preferred) mode = preferred;
else if (iItem == idx_mono) mode = RENDER_STEREO_MODE_MONO;
else if (iItem == idx_playing) mode = RENDER_STEREO_MODE_AUTO;
else if (iItem == idx_select) mode = GetStereoModeByUserChoice();
SetStereoModeByUser( mode );
}
CApplicationMessenger::GetInstance().SendMsg(TMSG_MEDIA_UNPAUSE);
}
break;
case STEREOSCOPIC_PLAYBACK_MODE_PREFERRED: // Stereoscopic
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, preferred);
SetStereoMode( preferred );
break;
case 2: // Mono
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, RENDER_STEREO_MODE_MONO);
SetStereoMode( RENDER_STEREO_MODE_MONO );
break;
default:
break;
}
}
void CStereoscopicsManager::OnPlaybackStopped(void)
{
RENDER_STEREO_MODE mode = GetStereoMode();
if (CServiceBroker::GetSettings().GetBool(CSettings::SETTING_VIDEOPLAYER_QUITSTEREOMODEONSTOP) && mode != RENDER_STEREO_MODE_OFF)
{
CLog::Log(LOGDEBUG, "StereoscopicsManager::%s: stereo:%d", __FUNCTION__, RENDER_STEREO_MODE_OFF);
SetStereoMode(RENDER_STEREO_MODE_OFF);
}
// reset user modes on playback end to start over new on next playback and not end up in a probably unwanted mode
if (m_stereoModeSetByUser != RENDER_STEREO_MODE_OFF)
m_lastStereoModeSetByUser = m_stereoModeSetByUser;
m_stereoModeSetByUser = RENDER_STEREO_MODE_UNDEFINED;
}<|fim▁end|> | }
switch (playbackMode)
{ |
<|file_name|>Enhance.js<|end_file_name|><|fim▁begin|>(function () {
function remap(fromValue, fromMin, fromMax, toMin, toMax) {
// Compute the range of the data
var fromRange = fromMax - fromMin,
toRange = toMax - toMin,
toValue;
// If either range is 0, then the value can only be mapped to 1 value
if (fromRange === 0) {
return toMin + toRange / 2;
}
if (toRange === 0) {
return toMin;
}
// (1) untranslate, (2) unscale, (3) rescale, (4) retranslate
toValue = (fromValue - fromMin) / fromRange;
toValue = (toRange * toValue) + toMin;
return toValue;
}
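// Worked example: remap(5, 0, 10, 0, 100) === 50; the value keeps its relative
// position (here 50%) when moved from the 0..10 range to the 0..100 range.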
/**
* Enhance Filter. Adjusts the colors so that they span the widest
* possible range (ie 0-255). Performs w*h pixel reads and w*h pixel
* writes.
* @function
* @name Enhance
* @memberof Kinetic.Filters
* @param {Object} imageData
* @author ippo615
* @example
* node.cache();
* node.filters([Kinetic.Filters.Enhance]);
* node.enhance(0.4);
*/
Kinetic.Filters.Enhance = function (imageData) {
var data = imageData.data,
nSubPixels = data.length,
rMin = data[0], rMax = rMin, r,
gMin = data[1], gMax = gMin, g,
bMin = data[2], bMax = bMin, b,
i;
// If we are not enhancing anything - don't do any computation
var enhanceAmount = this.enhance();
if( enhanceAmount === 0 ){ return; }
// 1st Pass - find the min and max for each channel:
for (i = 0; i < nSubPixels; i += 4) {
r = data[i + 0];
if (r < rMin) { rMin = r; }
else if (r > rMax) { rMax = r; }
g = data[i + 1];
if (g < gMin) { gMin = g; } else
if (g > gMax) { gMax = g; }
b = data[i + 2];
if (b < bMin) { bMin = b; } else
if (b > bMax) { bMax = b; }
//a = data[i + 3];
//if (a < aMin) { aMin = a; } else
//if (a > aMax) { aMax = a; }
}
// If a channel has only one level, fall back to the full 0-255 range
if( rMax === rMin ){ rMax = 255; rMin = 0; }
if( gMax === gMin ){ gMax = 255; gMin = 0; }<|fim▁hole|> var rMid, rGoalMax,rGoalMin,
gMid, gGoalMax,gGoalMin,
bMid, bGoalMax,bGoalMin;
// If the enhancement is positive - stretch the histogram
if ( enhanceAmount > 0 ){
rGoalMax = rMax + enhanceAmount*(255-rMax);
rGoalMin = rMin - enhanceAmount*(rMin-0);
gGoalMax = gMax + enhanceAmount*(255-gMax);
gGoalMin = gMin - enhanceAmount*(gMin-0);
bGoalMax = bMax + enhanceAmount*(255-bMax);
bGoalMin = bMin - enhanceAmount*(bMin-0);
// If the enhancement is negative - compress the histogram
} else {
rMid = (rMax + rMin)*0.5;
rGoalMax = rMax + enhanceAmount*(rMax-rMid);
rGoalMin = rMin + enhanceAmount*(rMin-rMid);
gMid = (gMax + gMin)*0.5;
gGoalMax = gMax + enhanceAmount*(gMax-gMid);
gGoalMin = gMin + enhanceAmount*(gMin-gMid);
bMid = (bMax + bMin)*0.5;
bGoalMax = bMax + enhanceAmount*(bMax-bMid);
bGoalMin = bMin + enhanceAmount*(bMin-bMid);
}
// Pass 2 - remap everything, except the alpha
for (i = 0; i < nSubPixels; i += 4) {
data[i + 0] = remap(data[i + 0], rMin, rMax, rGoalMin, rGoalMax);
data[i + 1] = remap(data[i + 1], gMin, gMax, gGoalMin, gGoalMax);
data[i + 2] = remap(data[i + 2], bMin, bMax, bGoalMin, bGoalMax);
//data[i + 3] = remap(data[i + 3], aMin, aMax, aGoalMin, aGoalMax);
}
};
Kinetic.Factory.addGetterSetter(Kinetic.Node, 'enhance', 0, null, Kinetic.Factory.afterSetFilter);
/**
* get/set enhance. Use with {@link Kinetic.Filters.Enhance} filter.
* @name enhance
* @method
* @memberof Kinetic.Node.prototype
* @param {Float} amount
* @returns {Float}
*/
})();<|fim▁end|> | if( bMax === bMin ){ bMax = 255; bMin = 0; }
|
<|file_name|>TSDAttributeValuePairListType.java<|end_file_name|><|fim▁begin|>//
// This file was generated by the JAXB (JavaTM Architecture for XML Binding) Reference Implementation, v2.2.8-b130911.1802.
// See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a>.
// Any modifications to this file will be lost upon recompilation of the source schema.
// Generated on: 2015.07.30 at 02:38:18 PM KST
//
<|fim▁hole|>
package org.gs1.source.tsd;
import java.util.ArrayList;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
 * <p>Java class for the TSD_AttributeValuePairListType complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="TSD_AttributeValuePairListType">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="stringAVP" type="{urn:gs1:tsd:tsd_common:xsd:1}TSD_StringAttributeValuePairType" maxOccurs="unbounded"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "TSD_AttributeValuePairListType", namespace = "urn:gs1:tsd:tsd_common:xsd:1", propOrder = {
"stringAVP"
})
public class TSDAttributeValuePairListType {
@XmlElement(required = true)
protected List<TSDStringAttributeValuePairType> stringAVP;
/**
* Gets the value of the stringAVP property.
*
* <p>
* This accessor method returns a reference to the live list,
* not a snapshot. Therefore any modification you make to the
* returned list will be present inside the JAXB object.
* This is why there is not a <CODE>set</CODE> method for the stringAVP property.
*
* <p>
* For example, to add a new item, do as follows:
* <pre>
* getStringAVP().add(newItem);
* </pre>
*
*
* <p>
* Objects of the following type(s) are allowed in the list
* {@link TSDStringAttributeValuePairType }
*
*
*/
public List<TSDStringAttributeValuePairType> getStringAVP() {
if (stringAVP == null) {
stringAVP = new ArrayList<TSDStringAttributeValuePairType>();
}
return this.stringAVP;
}
}<|fim▁end|> | |
<|file_name|>core.ts<|end_file_name|><|fim▁begin|>/// <reference path="types.ts"/>
module ts {
export interface Map<T> {
[index: string]: T;
}
export interface StringSet extends Map<any> { }
export function forEach<T, U>(array: T[], callback: (element: T) => U): U {
var result: U;
if (array) {
for (var i = 0, len = array.length; i < len; i++) {
if (result = callback(array[i])) {
break;
}
}
}
return result;
}
export function contains<T>(array: T[], value: T): boolean {
if (array) {
for (var i = 0, len = array.length; i < len; i++) {
if (array[i] === value) {
return true;
}
}
}
return false;
}
export function indexOf<T>(array: T[], value: T): number {
if (array) {
for (var i = 0, len = array.length; i < len; i++) {
if (array[i] === value) {
return i;
}
}
}
return -1;
}
export function countWhere<T>(array: T[], predicate: (x: T) => boolean): number {
var count = 0;
if (array) {
for (var i = 0, len = array.length; i < len; i++) {
if (predicate(array[i])) {
count++;
}
}
}
return count;
}
export function filter<T>(array: T[], f: (x: T) => boolean): T[] {
if (array) {
var result: T[] = [];
for (var i = 0, len = array.length; i < len; i++) {
var item = array[i];
if (f(item)) {
result.push(item);
}
}
}
return result;
}
export function map<T, U>(array: T[], f: (x: T) => U): U[] {
if (array) {
var result: U[] = [];
for (var i = 0, len = array.length; i < len; i++) {
result.push(f(array[i]));
}
}
return result;
}
export function concatenate<T>(array1: T[], array2: T[]): T[] {
if (!array2 || !array2.length) return array1;
if (!array1 || !array1.length) return array2;
return array1.concat(array2);
}
export function uniqueElements<T>(array: T[]): T[] {
if (array) {
var result: T[] = [];
for (var i = 0, len = array.length; i < len; i++) {
var item = array[i];
if (!contains(result, item)) result.push(item);
}
}
return result;
}
export function sum(array: any[], prop: string): number {
var result = 0;
for (var i = 0; i < array.length; i++) {
result += array[i][prop];
}
return result;
}
export function binarySearch(array: number[], value: number): number {
var low = 0;
var high = array.length - 1;
while (low <= high) {
var middle = low + ((high - low) >> 1);
var midValue = array[middle];
if (midValue === value) {
return middle;
}
else if (midValue > value) {
high = middle - 1;
}
else {
low = middle + 1;
}
}
return ~low;
}
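// Example: binarySearch([1, 3, 5], 5) === 2. A miss returns the bitwise
// complement of the insertion point, e.g. binarySearch([1, 3, 5], 4) === ~2 === -3,
// so callers can test for result < 0 and recover the insertion index with ~result.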
var hasOwnProperty = Object.prototype.hasOwnProperty;
export function hasProperty<T>(map: Map<T>, key: string): boolean {
return hasOwnProperty.call(map, key);
}
export function getProperty<T>(map: Map<T>, key: string): T {
return hasOwnProperty.call(map, key) ? map[key] : undefined;
}
export function isEmpty<T>(map: Map<T>) {
for (var id in map) {
if (hasProperty(map, id)) {
return false;
}
}
return true;
}
export function clone<T>(object: T): T {
var result: any = {};
for (var id in object) {
result[id] = (<any>object)[id];
}
return <T>result;
}
export function forEachValue<T, U>(map: Map<T>, callback: (value: T) => U): U {
var result: U;
for (var id in map) {
if (result = callback(map[id])) break;
}
return result;
}
export function forEachKey<T, U>(map: Map<T>, callback: (key: string) => U): U {
var result: U;
for (var id in map) {
if (result = callback(id)) break;
}
return result;
}
export function lookUp<T>(map: Map<T>, key: string): T {
return hasProperty(map, key) ? map[key] : undefined;
}
export function mapToArray<T>(map: Map<T>): T[] {
var result: T[] = [];
for (var id in map) {
result.push(map[id]);
}
return result;
}
/**
* Creates a map from the elements of an array.
*
* @param array the array of input elements.
* @param makeKey a function that produces a key for a given element.
*
* This function makes no effort to avoid collisions; if any two elements produce
* the same key with the given 'makeKey' function, then the element with the higher
* index in the array will be the one associated with the produced key.
*/
export function arrayToMap<T>(array: T[], makeKey: (value: T) => string): Map<T> {
var result: Map<T> = {};
forEach(array, value => {
result[makeKey(value)] = value;
});
return result;
}
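// Example: arrayToMap([{ id: "a" }, { id: "b" }], v => v.id) produces
// { a: { id: "a" }, b: { id: "b" } }; with duplicate keys the element with the
// higher index wins, per the note above.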
function formatStringFromArgs(text: string, args: { [index: number]: any; }, baseIndex?: number): string {
baseIndex = baseIndex || 0;
return text.replace(/{(\d+)}/g, (match, index?) => args[+index + baseIndex]);
}
export var localizedDiagnosticMessages: Map<string> = undefined;
export function getLocaleSpecificMessage(message: string) {
return localizedDiagnosticMessages && localizedDiagnosticMessages[message]
? localizedDiagnosticMessages[message]
: message;
}
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage, ...args: any[]): Diagnostic;
export function createFileDiagnostic(file: SourceFile, start: number, length: number, message: DiagnosticMessage): Diagnostic {
Debug.assert(start >= 0, "start must be non-negative, is " + start);
Debug.assert(length >= 0, "length must be non-negative, is " + length);
var text = getLocaleSpecificMessage(message.key);
if (arguments.length > 4) {
<|fim▁hole|> file: file,
start: start,
length: length,
messageText: text,
category: message.category,
code: message.code
};
}
export function createCompilerDiagnostic(message: DiagnosticMessage, ...args: any[]): Diagnostic;
export function createCompilerDiagnostic(message: DiagnosticMessage): Diagnostic {
var text = getLocaleSpecificMessage(message.key);
if (arguments.length > 1) {
text = formatStringFromArgs(text, arguments, 1);
}
return {
file: undefined,
start: undefined,
length: undefined,
messageText: text,
category: message.category,
code: message.code
};
}
export function chainDiagnosticMessages(details: DiagnosticMessageChain, message: DiagnosticMessage, ...args: any[]): DiagnosticMessageChain;
export function chainDiagnosticMessages(details: DiagnosticMessageChain, message: DiagnosticMessage): DiagnosticMessageChain {
var text = getLocaleSpecificMessage(message.key);
if (arguments.length > 2) {
text = formatStringFromArgs(text, arguments, 2);
}
return {
messageText: text,
category: message.category,
code: message.code,
next: details
};
}
export function flattenDiagnosticChain(file: SourceFile, start: number, length: number, diagnosticChain: DiagnosticMessageChain, newLine: string): Diagnostic {
Debug.assert(start >= 0, "start must be non-negative, is " + start);
Debug.assert(length >= 0, "length must be non-negative, is " + length);
var code = diagnosticChain.code;
var category = diagnosticChain.category;
var messageText = "";
var indent = 0;
while (diagnosticChain) {
if (indent) {
messageText += newLine;
for (var i = 0; i < indent; i++) {
messageText += " ";
}
}
messageText += diagnosticChain.messageText;
indent++;
diagnosticChain = diagnosticChain.next;
}
return {
file: file,
start: start,
length: length,
code: code,
category: category,
messageText: messageText
};
}
export function compareValues<T>(a: T, b: T): number {
if (a === b) return 0;
if (a === undefined) return -1;
if (b === undefined) return 1;
return a < b ? -1 : 1;
}
function getDiagnosticFilename(diagnostic: Diagnostic): string {
return diagnostic.file ? diagnostic.file.filename : undefined;
}
export function compareDiagnostics(d1: Diagnostic, d2: Diagnostic): number {
return compareValues(getDiagnosticFilename(d1), getDiagnosticFilename(d2)) ||
compareValues(d1.start, d2.start) ||
compareValues(d1.length, d2.length) ||
compareValues(d1.code, d2.code) ||
compareValues(d1.messageText, d2.messageText) ||
0;
}
export function deduplicateSortedDiagnostics(diagnostics: Diagnostic[]): Diagnostic[] {
if (diagnostics.length < 2) {
return diagnostics;
}
var newDiagnostics = [diagnostics[0]];
var previousDiagnostic = diagnostics[0];
for (var i = 1; i < diagnostics.length; i++) {
var currentDiagnostic = diagnostics[i];
var isDupe = compareDiagnostics(currentDiagnostic, previousDiagnostic) === 0;
if (!isDupe) {
newDiagnostics.push(currentDiagnostic);
previousDiagnostic = currentDiagnostic;
}
}
return newDiagnostics;
}
export function normalizeSlashes(path: string): string {
return path.replace(/\\/g, "/");
}
// Returns length of path root (i.e. length of "/", "x:/", "//server/share/")
export function getRootLength(path: string): number {
if (path.charCodeAt(0) === CharacterCodes.slash) {
if (path.charCodeAt(1) !== CharacterCodes.slash) return 1;
var p1 = path.indexOf("/", 2);
if (p1 < 0) return 2;
var p2 = path.indexOf("/", p1 + 1);
if (p2 < 0) return p1 + 1;
return p2 + 1;
}
if (path.charCodeAt(1) === CharacterCodes.colon) {
if (path.charCodeAt(2) === CharacterCodes.slash) return 3;
return 2;
}
return 0;
}
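// Examples: getRootLength("/a/b") === 1, getRootLength("c:/a") === 3, and
// getRootLength("//server/share/a") === 15, the length of "//server/share/".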
export var directorySeparator = "/";
function getNormalizedParts(normalizedSlashedPath: string, rootLength: number) {
var parts = normalizedSlashedPath.substr(rootLength).split(directorySeparator);
var normalized: string[] = [];
for (var i = 0; i < parts.length; i++) {
var part = parts[i];
if (part !== ".") {
if (part === ".." && normalized.length > 0 && normalized[normalized.length - 1] !== "..") {
normalized.pop();
}
else {
normalized.push(part);
}
}
}
return normalized;
}
export function normalizePath(path: string): string {
var path = normalizeSlashes(path);
var rootLength = getRootLength(path);
var normalized = getNormalizedParts(path, rootLength);
return path.substr(0, rootLength) + normalized.join(directorySeparator);
}
export function getDirectoryPath(path: string) {
return path.substr(0, Math.max(getRootLength(path), path.lastIndexOf(directorySeparator)));
}
export function isUrl(path: string) {
return path && !isRootedDiskPath(path) && path.indexOf("://") !== -1;
}
export function isRootedDiskPath(path: string) {
return getRootLength(path) !== 0;
}
function normalizedPathComponents(path: string, rootLength: number) {
var normalizedParts = getNormalizedParts(path, rootLength);
return [path.substr(0, rootLength)].concat(normalizedParts);
}
export function getNormalizedPathComponents(path: string, currentDirectory: string) {
var path = normalizeSlashes(path);
var rootLength = getRootLength(path);
if (rootLength == 0) {
// If the path is not rooted it is relative to current directory
path = combinePaths(normalizeSlashes(currentDirectory), path);
rootLength = getRootLength(path);
}
return normalizedPathComponents(path, rootLength);
}
export function getNormalizedPathFromPathComponents(pathComponents: string[]) {
if (pathComponents && pathComponents.length) {
return pathComponents[0] + pathComponents.slice(1).join(directorySeparator);
}
}
function getNormalizedPathComponentsOfUrl(url: string) {
// Get root length of http://www.website.com/folder1/folder2/
// In this example the root is: http://www.website.com/
// normalized path components should be ["http://www.website.com/", "folder1", "folder2"]
var urlLength = url.length;
// Initial root length is http:// part
var rootLength = url.indexOf("://") + "://".length;
while (rootLength < urlLength) {
// Consume all immediate slashes in the protocol
// eg.initial rootlength is just file:// but it needs to consume another "/" in file:///
if (url.charCodeAt(rootLength) === CharacterCodes.slash) {
rootLength++;
}
else {
// non slash character means we continue proceeding to next component of root search
break;
}
}
// there are no parts after http:// just return current string as the pathComponent
if (rootLength === urlLength) {
return [url];
}
// Find the index of "/" after website.com so the root can be http://www.website.com/ (from existing http://)
var indexOfNextSlash = url.indexOf(directorySeparator, rootLength);
if (indexOfNextSlash !== -1) {
// Found the "/" after the website.com so the root is length of http://www.website.com/
// and get components after the root normally like any other folder components
rootLength = indexOfNextSlash + 1;
return normalizedPathComponents(url, rootLength);
}
else {
// Can't find the host assume the rest of the string as component
// but make sure we append "/" to it as root is not joined using "/"
// eg. if url passed in was http://website.com we want to use root as [http://website.com/]
// so that other path manipulations will be correct and it can be merged with relative paths correctly
return [url + directorySeparator];
}
}
function getNormalizedPathOrUrlComponents(pathOrUrl: string, currentDirectory: string) {
if (isUrl(pathOrUrl)) {
return getNormalizedPathComponentsOfUrl(pathOrUrl);
}
else {
return getNormalizedPathComponents(pathOrUrl, currentDirectory);
}
}
export function getRelativePathToDirectoryOrUrl(directoryPathOrUrl: string, relativeOrAbsolutePath: string, currentDirectory: string, getCanonicalFileName: (fileName: string) => string, isAbsolutePathAnUrl: boolean) {
var pathComponents = getNormalizedPathOrUrlComponents(relativeOrAbsolutePath, currentDirectory);
var directoryComponents = getNormalizedPathOrUrlComponents(directoryPathOrUrl, currentDirectory);
if (directoryComponents.length > 1 && directoryComponents[directoryComponents.length - 1] === "") {
// If the directory path given was of type test/cases/ then we really need components of directory to be only till its name
// that is ["test", "cases", ""] needs to be actually ["test", "cases"]
directoryComponents.length--;
}
// Find the component that differs
for (var joinStartIndex = 0; joinStartIndex < pathComponents.length && joinStartIndex < directoryComponents.length; joinStartIndex++) {
if (getCanonicalFileName(directoryComponents[joinStartIndex]) !== getCanonicalFileName(pathComponents[joinStartIndex])) {
break;
}
}
// Get the relative path
if (joinStartIndex) {
var relativePath = "";
var relativePathComponents = pathComponents.slice(joinStartIndex, pathComponents.length);
for (; joinStartIndex < directoryComponents.length; joinStartIndex++) {
if (directoryComponents[joinStartIndex] !== "") {
relativePath = relativePath + ".." + directorySeparator;
}
}
return relativePath + relativePathComponents.join(directorySeparator);
}
// Cant find the relative path, get the absolute path
var absolutePath = getNormalizedPathFromPathComponents(pathComponents);
if (isAbsolutePathAnUrl && isRootedDiskPath(absolutePath)) {
absolutePath = "file:///" + absolutePath;
}
return absolutePath;
}
export function getBaseFilename(path: string) {
var i = path.lastIndexOf(directorySeparator);
return i < 0 ? path : path.substring(i + 1);
}
export function combinePaths(path1: string, path2: string) {
if (!(path1 && path1.length)) return path2;
if (!(path2 && path2.length)) return path1;
if (path2.charAt(0) === directorySeparator) return path2;
if (path1.charAt(path1.length - 1) === directorySeparator) return path1 + path2;
return path1 + directorySeparator + path2;
}
export function fileExtensionIs(path: string, extension: string): boolean {
var pathLen = path.length;
var extLen = extension.length;
return pathLen > extLen && path.substr(pathLen - extLen, extLen) === extension;
}
var supportedExtensions = [".d.ts", ".ts", ".js"];
export function removeFileExtension(path: string): string {
for (var i = 0; i < supportedExtensions.length; i++) {
var ext = supportedExtensions[i];
if (fileExtensionIs(path, ext)) {
return path.substr(0, path.length - ext.length);
}
}
return path;
}
var escapedCharsRegExp = /[\t\v\f\b\0\r\n\"\\\u2028\u2029\u0085]/g;
var escapedCharsMap: Map<string> = {
"\t": "\\t",
"\v": "\\v",
"\f": "\\f",
"\b": "\\b",
"\0": "\\0",
"\r": "\\r",
"\n": "\\n",
"\"": "\\\"",
"\u2028": "\\u2028", // lineSeparator
"\u2029": "\\u2029", // paragraphSeparator
"\u0085": "\\u0085" // nextLine
};
/** NOTE: This *does not* support the full escape characters, it only supports the subset that can be used in file names
* or string literals. If the information encoded in the map changes, this needs to be revisited. */
export function escapeString(s: string): string {
return escapedCharsRegExp.test(s) ? s.replace(escapedCharsRegExp, c => {
return escapedCharsMap[c] || c;
}) : s;
}
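// Example: an input containing a double quote and a newline comes back with them
// replaced by the two-character sequences \" and \n; characters outside the map
// are left untouched, and strings with nothing to escape are returned as-is.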
export interface ObjectAllocator {
getNodeConstructor(kind: SyntaxKind): new () => Node;
getSymbolConstructor(): new (flags: SymbolFlags, name: string) => Symbol;
getTypeConstructor(): new (checker: TypeChecker, flags: TypeFlags) => Type;
getSignatureConstructor(): new (checker: TypeChecker) => Signature;
}
function Symbol(flags: SymbolFlags, name: string) {
this.flags = flags;
this.name = name;
this.declarations = undefined;
}
function Type(checker: TypeChecker, flags: TypeFlags) {
this.flags = flags;
}
function Signature(checker: TypeChecker) {
}
export var objectAllocator: ObjectAllocator = {
getNodeConstructor: kind => {
function Node() {
}
Node.prototype = {
kind: kind,
pos: 0,
end: 0,
flags: 0,
parent: undefined,
};
return <any>Node;
},
getSymbolConstructor: () => <any>Symbol,
getTypeConstructor: () => <any>Type,
getSignatureConstructor: () => <any>Signature
}
export enum AssertionLevel {
None = 0,
Normal = 1,
Aggressive = 2,
VeryAggressive = 3,
}
export module Debug {
var currentAssertionLevel = AssertionLevel.None;
export function shouldAssert(level: AssertionLevel): boolean {
return currentAssertionLevel >= level;
}
export function assert(expression: any, message?: string, verboseDebugInfo?: () => string): void {
if (!expression) {
var verboseDebugString = "";
if (verboseDebugInfo) {
verboseDebugString = "\r\nVerbose Debug Information: " + verboseDebugInfo();
}
throw new Error("Debug Failure. False expression: " + (message || "") + verboseDebugString);
}
}
export function fail(message?: string): void {
Debug.assert(false, message);
}
}
}<|fim▁end|> | text = formatStringFromArgs(text, arguments, 4);
}
return {
|
<|file_name|>frame.js<|end_file_name|><|fim▁begin|>var debug = require('debug')('openframe:model:Frame');
module.exports = function(Frame) {
Frame.disableRemoteMethodByName('createChangeStream');
// Remove sensitive data from Artworks being returned
// in public frames
// Frame.afterRemote('**', function(ctx, resultInstance, next) {
// debug('ctx.methodString', ctx.methodString);
// function updateResult(result) {
// if (result.current_artwork) {
// let newArtwork = {
// title: result.current_artwork.title,
// author_name: result.current_artwork.author_name
// };
// if (result.current_artwork.is_public) {
// newArtwork.id = result.current_artwork.id;
// }
// result.current_artwork(newArtwork);
// // debug(result.current_artwork);
// }
// }
// if (ctx.result) {
// if (Array.isArray(resultInstance)) {
// debug('isArray', resultInstance.length);
// ctx.result.forEach(function(result) {
// updateResult(result);
// });
// } else {
// updateResult(ctx.result);
// }
// }
// next();
// });
// Never save 'current_artwork' object into DB -- it comes from relation, via currentArtworkId
// TODO: I think this is a(nother) loopback bug, since with strict on we should be enforcing
// properties, but since this property is the name of a relation it's allowing it to be saved (?)
Frame.observe('before save', function(ctx, next) {
debug('before save', typeof ctx.instance);
if (ctx.instance) {
ctx.instance.unsetAttribute('current_artwork');
} else {
delete ctx.data.current_artwork;
}
debug('before save - MODIFIED', ctx.instance);
next();
});
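// Illustrative effect: saving { name: "Kitchen", current_artwork: {...} } persists only
// { name: "Kitchen" }; the artwork is always derived from currentArtworkId via the relation.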
// whenever a Frame model is saved, broadcast an update event
Frame.observe('after save', function(ctx, next) {
if (ctx.instance && Frame.app.pubsub) {
debug('Saved %s %s', ctx.Model.modelName, ctx.instance.id);
if (ctx.isNewInstance) {
debug('New Frame, publishing: /user/' + ctx.instance.ownerId + '/frame/new');
Frame.app.pubsub.publish('/user/' + ctx.instance.ownerId + '/frame/new', ctx.instance.id);
} else {
debug('Existing Frame, publishing: /frame/' + ctx.instance.id + '/db_updated');
// debug(ctx.instance);
Frame.findById(ctx.instance.id, { include: 'current_artwork' }, function(err, frame) {
debug(err, frame);
Frame.app.pubsub.publish('/frame/' + frame.id + '/db_updated', frame);
});
}
}
next();
});
// Ouch. Ow. Yowsers. Seriously?
function removeManagers(frame, managers) {
return new Promise((resolve, reject) => {
frame.managers(function(err, current_managers) {
debug(current_managers);
if (current_managers.length) {
var count = 0,
total = current_managers.length;
current_managers.forEach(function(cur_man) {
debug(cur_man);
if (managers.indexOf(cur_man.username) === -1) {
debug('removing %s', cur_man.username);
frame.managers.remove(cur_man, function(err) {
if (err) debug(err);
count++;
if (count === total) {
// all who are no longer present in the new list have been removed
resolve();
}
});
} else {
count++;
if (count === total) {
// all who are no longer present in the new list have been removed
resolve();
}
}
});
} else {
// there are no current managers
resolve();
}
});
});
}
// Painful painful painful -- so gross. Why loopback, why!?
function addManagers(frame, managers) {
var OpenframeUser = Frame.app.models.OpenframeUser;
return new Promise((resolve, reject) => {
OpenframeUser.find({ where: { username: { inq: managers }}}, function(err, users) {
if (err) {
debug(err);
}
var count = 0,
total = users.length;
if (total === 0) {
// no managers found by username, return frame including current managers
Frame.findById(frame.id, {include: 'managers'}, function(err, frame) {
debug(err, frame);
resolve(frame);
});
} else {
// managers found by username, add them to frame, then
// return frame including current managers
// XXX: Unfortunately loopback doesn't seem to provide a way to batch
// update hasAndBelongsToMany relationships :/
users.forEach(function(user) {
frame.managers.add(user, function(err) {
count++;
if (count === total) {
Frame.findById(frame.id, {include: 'managers'}, function(err, frame) {
debug(err, frame);
resolve(frame);
});
}
});
});
}
});
});
}
// Update managers by username
//
// XXX: This is incredibly ugly. Loopback doesn't provide a good way to update
// this type of relationship all in one go, which makes it a huge messy pain. Given
// time, I may fix this.
Frame.prototype.update_managers_by_username = function(managers, cb) {
debug(managers);
var self = this;
removeManagers(self, managers)
.then(function() {
addManagers(self, managers)
.then(function(frame) {
cb(null, frame);
})
.catch(debug);
}).catch(debug);
};
// Expose update_managers_by_username remote method
Frame.remoteMethod(
'prototype.update_managers_by_username', {
description: 'Add a related item by username for managers.',
accepts: {
arg: 'managers',
type: 'array',
http: {
source: 'body'
}
},
http: {
verb: 'put',
path: '/managers/by_username'
},
returns: {
arg: 'frame',
type: 'Object'
}
}
);
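// Illustrative request (path assumes the default REST root for this model):
//   PUT /api/frames/:id/managers/by_username   body: ["alice", "bob"]
// Managers missing from the list are removed, the remaining usernames are matched
// to users and added, and the frame is returned with its managers included.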
/**
* Update the current artwork by artwork ID
* @param {String} currentArtworkId
* @param {Function} callback
*/
Frame.prototype.update_current_artwork = function(currentArtworkId, cb) {
debug('update_current_artwork', currentArtworkId);
var self = this;
self.updateAttribute('currentArtworkId', currentArtworkId, function(err, instance) {
cb(err, instance);
});
};
Frame.remoteMethod(
'prototype.update_current_artwork', {
description: 'Set the current artwork for this frame',
accepts: {
arg: 'currentArtworkId',
type: 'any',
required: true,
http: {
source: 'path'
}
},
http: {
verb: 'put',
path: '/current_artwork/:currentArtworkId'
},
returns: {
arg: 'frame',
type: 'Object'
}
}
);
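// Illustrative request (hypothetical ids): PUT /api/frames/42/current_artwork/7
// only updates currentArtworkId; the artwork object itself is resolved through
// the current_artwork relation on read.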
/**
* Override toJSON in order to remove inclusion of email address for users that are
* not the currently logged-in user.
*
* @return {Object} Plain JS Object which will be transformed to JSON for output.
*/
// Frame.prototype.toJSON = function() {
// // TODO: this seems awfully fragile... not very clear when context is available
// var ctx = loopback.getCurrentContext(),
// user = ctx.get('currentUser'),
// userId = user && user.id,
// obj = this.toObject(false, true, false);
// debug('FRAME toJSON', userId, obj);
<|fim▁hole|> // // Remove email from managers
// if (obj.managers && obj.managers.length) {
// obj.managers.forEach((manager) => {
// delete manager.email;
// });
// }
// // Remove email from owner unless it's the currently logged in user.
// if (obj.owner && userId !== obj.owner.id) {
// delete obj.owner.email;
// }
// return obj;
// };
};<|fim▁end|> | |
<|file_name|>gcc.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Mozilla Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use crate::compiler::args::*;
use crate::compiler::c::{CCompilerImpl, CCompilerKind, Language, ParsedArguments};
use crate::compiler::{clang, Cacheable, ColorMode, CompileCommand, CompilerArguments};
use crate::dist;
use crate::mock_command::{CommandCreatorSync, RunCommand};
use crate::util::{run_input_output, OsStrExt};
use log::Level::Trace;
use std::collections::HashMap;
use std::ffi::OsString;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process;
use crate::errors::*;
/// A struct on which to implement `CCompilerImpl`.
#[derive(Clone, Debug)]
pub struct GCC {
pub gplusplus: bool,
}
impl CCompilerImpl for GCC {
fn kind(&self) -> CCompilerKind {
CCompilerKind::GCC
}
fn plusplus(&self) -> bool {
self.gplusplus
}
fn parse_arguments(
&self,
arguments: &[OsString],
cwd: &Path,
) -> CompilerArguments<ParsedArguments> {
parse_arguments(arguments, cwd, &ARGS[..], self.gplusplus)
}
fn preprocess<T>(
&self,
creator: &T,
executable: &Path,
parsed_args: &ParsedArguments,
cwd: &Path,
env_vars: &[(OsString, OsString)],
may_dist: bool,
rewrite_includes_only: bool,
) -> SFuture<process::Output>
where
T: CommandCreatorSync,
{
preprocess(
creator,
executable,
parsed_args,
cwd,
env_vars,
may_dist,
self.kind(),
rewrite_includes_only,
)
}
fn generate_compile_commands(
&self,
path_transformer: &mut dist::PathTransformer,
executable: &Path,
parsed_args: &ParsedArguments,
cwd: &Path,
env_vars: &[(OsString, OsString)],
rewrite_includes_only: bool,
) -> Result<(CompileCommand, Option<dist::CompileCommand>, Cacheable)> {
generate_compile_commands(
path_transformer,
executable,
parsed_args,
cwd,
env_vars,
self.kind(),
rewrite_includes_only,
)
}
}
ArgData! { pub
TooHardFlag,
TooHard(OsString),
DiagnosticsColor(OsString),
DiagnosticsColorFlag,
NoDiagnosticsColorFlag,
// Should only be necessary for -Xclang flags - unknown flags not hidden behind
// -Xclang are assumed not to affect compilation
PassThrough(OsString),
PassThroughPath(PathBuf),
PreprocessorArgumentFlag,
PreprocessorArgument(OsString),
PreprocessorArgumentPath(PathBuf),
DoCompilation,
Output(PathBuf),
NeedDepTarget,
// Though you might think this should be a path as it's a Makefile target,
// it's not treated as a path by the compiler - it's just written wholesale
// (including any funny make syntax) into the dep file.
DepTarget(OsString),
DepArgumentPath(PathBuf),
Language(OsString),
SplitDwarf,
ProfileGenerate,
TestCoverage,
Coverage,
ExtraHashFile(PathBuf),
// Only valid for clang, but this needs to be here since clang shares gcc's arg parsing.
XClang(OsString),
}
use self::ArgData::*;
// Mostly taken from https://github.com/ccache/ccache/blob/master/src/compopt.c#L32-L84
counted_array!(pub static ARGS: [ArgInfo<ArgData>; _] = [
flag!("-", TooHardFlag),
flag!("--coverage", Coverage),
take_arg!("--param", OsString, Separated, PassThrough),
flag!("--save-temps", TooHardFlag),
take_arg!("--serialize-diagnostics", PathBuf, Separated, PassThroughPath),
take_arg!("--sysroot", PathBuf, Separated, PassThroughPath),
take_arg!("-A", OsString, Separated, PassThrough),
take_arg!("-B", PathBuf, CanBeSeparated, PassThroughPath),
take_arg!("-D", OsString, CanBeSeparated, PassThrough),
flag!("-E", TooHardFlag),
take_arg!("-F", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-G", OsString, Separated, PassThrough),
take_arg!("-I", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-L", OsString, Separated, PassThrough),
flag!("-M", TooHardFlag),
flag!("-MD", NeedDepTarget),
take_arg!("-MF", PathBuf, Separated, DepArgumentPath),
flag!("-MM", TooHardFlag),
flag!("-MMD", NeedDepTarget),
flag!("-MP", NeedDepTarget),
take_arg!("-MQ", OsString, Separated, DepTarget),
take_arg!("-MT", OsString, Separated, DepTarget),
flag!("-P", TooHardFlag),
take_arg!("-U", OsString, CanBeSeparated, PassThrough),
take_arg!("-V", OsString, Separated, PassThrough),
take_arg!("-Xassembler", OsString, Separated, PassThrough),
take_arg!("-Xlinker", OsString, Separated, PassThrough),
take_arg!("-Xpreprocessor", OsString, Separated, PreprocessorArgument),
take_arg!("-arch", OsString, Separated, PassThrough),
take_arg!("-aux-info", OsString, Separated, PassThrough),
take_arg!("-b", OsString, Separated, PassThrough),
flag!("-c", DoCompilation),
take_arg!("-fdiagnostics-color", OsString, Concatenated('='), DiagnosticsColor),
flag!("-fno-diagnostics-color", NoDiagnosticsColorFlag),
flag!("-fno-working-directory", PreprocessorArgumentFlag),
flag!("-fplugin=libcc1plugin", TooHardFlag),
flag!("-fprofile-arcs", ProfileGenerate),
flag!("-fprofile-generate", ProfileGenerate),
take_arg!("-fprofile-use", OsString, Concatenated, TooHard),
flag!("-frepo", TooHardFlag),
flag!("-fsyntax-only", TooHardFlag),
flag!("-ftest-coverage", TestCoverage),
flag!("-fworking-directory", PreprocessorArgumentFlag),
flag!("-gsplit-dwarf", SplitDwarf),
take_arg!("-idirafter", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-iframework", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-imacros", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-imultilib", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-include", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-install_name", OsString, Separated, PassThrough),
take_arg!("-iprefix", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-iquote", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-isysroot", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-isystem", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-iwithprefix", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
take_arg!("-iwithprefixbefore", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
flag!("-nostdinc", PreprocessorArgumentFlag),
flag!("-nostdinc++", PreprocessorArgumentFlag),
take_arg!("-o", PathBuf, CanBeSeparated, Output),
flag!("-remap", PreprocessorArgumentFlag),
flag!("-save-temps", TooHardFlag),
take_arg!("-stdlib", OsString, Concatenated('='), PreprocessorArgument),
flag!("-trigraphs", PreprocessorArgumentFlag),
take_arg!("-u", OsString, CanBeSeparated, PassThrough),
take_arg!("-x", OsString, CanBeSeparated, Language),
take_arg!("-z", OsString, CanBeSeparated, PassThrough),
take_arg!("@", OsString, Concatenated, TooHard),
]);
/// Parse `arguments`, determining whether it is supported.
///
/// If any of the entries in `arguments` result in a compilation that
/// cannot be cached, return `CompilerArguments::CannotCache`.
/// If the commandline described by `arguments` is not compilation,
/// return `CompilerArguments::NotCompilation`.
/// Otherwise, return `CompilerArguments::Ok(ParsedArguments)`, with
/// the `ParsedArguments` struct containing information parsed from
/// `arguments`.
pub fn parse_arguments<S>(
arguments: &[OsString],
cwd: &Path,
arg_info: S,
plusplus: bool,
) -> CompilerArguments<ParsedArguments>
where
S: SearchableArgInfo<ArgData>,
{
let mut output_arg = None;
let mut input_arg = None;
let mut dep_target = None;
let mut dep_flag = OsString::from("-MT");
let mut common_args = vec![];
let mut preprocessor_args = vec![];
let mut dependency_args = vec![];
let mut extra_hash_files = vec![];
let mut compilation = false;
let mut multiple_input = false;
let mut split_dwarf = false;
let mut need_explicit_dep_target = false;
let mut language = None;
let mut compilation_flag = OsString::new();
let mut profile_generate = false;
let mut outputs_gcno = false;
let mut xclangs: Vec<OsString> = vec![];
let mut color_mode = ColorMode::Auto;
// Custom iterator to expand `@` arguments which stand for reading a file
// and interpreting it as a list of more arguments.
let it = ExpandIncludeFile::new(cwd, arguments);
for arg in ArgsIter::new(it, arg_info) {
let arg = try_or_cannot_cache!(arg, "argument parse");
// Check if the value part of this argument begins with '@'. If so, we either
// failed to expand it, or it was a concatenated argument - either way, bail.
// We refuse to cache concatenated arguments (like "-include@foo") because they're a
// mess. See https://github.com/mozilla/sccache/issues/150#issuecomment-318586953
match arg {
Argument::WithValue(_, ref v, ArgDisposition::Separated)
| Argument::WithValue(_, ref v, ArgDisposition::CanBeConcatenated(_))
| Argument::WithValue(_, ref v, ArgDisposition::CanBeSeparated(_)) => {
if v.clone().into_arg_os_string().starts_with("@") {
cannot_cache!("@");
}
}
// Empirically, concatenated arguments appear not to interpret '@' as
// an include directive, so just continue.
Argument::WithValue(_, _, ArgDisposition::Concatenated(_))
| Argument::Raw(_)
| Argument::UnknownFlag(_)
| Argument::Flag(_, _) => {}
}
match arg.get_data() {
Some(TooHardFlag) | Some(TooHard(_)) => {
cannot_cache!(arg.flag_str().expect("Can't be Argument::Raw/UnknownFlag",))
}
Some(SplitDwarf) => split_dwarf = true,
Some(DoCompilation) => {
compilation = true;
compilation_flag =
OsString::from(arg.flag_str().expect("Compilation flag expected"));
}
Some(ProfileGenerate) => profile_generate = true,
Some(TestCoverage) => outputs_gcno = true,
Some(Coverage) => {
outputs_gcno = true;
profile_generate = true;
}
Some(DiagnosticsColorFlag) => color_mode = ColorMode::On,
Some(NoDiagnosticsColorFlag) => color_mode = ColorMode::Off,
Some(DiagnosticsColor(value)) => {
color_mode = match value.to_str().unwrap_or("auto") {
"" | "always" => ColorMode::On,
"never" => ColorMode::Off,
_ => ColorMode::Auto,
};
}
Some(Output(p)) => output_arg = Some(p.clone()),
Some(NeedDepTarget) => need_explicit_dep_target = true,
Some(DepTarget(s)) => {
dep_flag = OsString::from(arg.flag_str().expect("Dep target flag expected"));
dep_target = Some(s.clone());
}
Some(DepArgumentPath(_))
| Some(ExtraHashFile(_))
| Some(PreprocessorArgumentFlag)
| Some(PreprocessorArgument(_))
| Some(PreprocessorArgumentPath(_))
| Some(PassThrough(_))
| Some(PassThroughPath(_)) => {}
Some(Language(lang)) => {
language = match lang.to_string_lossy().as_ref() {
"c" => Some(Language::C),
"c++" => Some(Language::Cxx),
"objective-c" => Some(Language::ObjectiveC),
"objective-c++" => Some(Language::ObjectiveCxx),
"cu" => Some(Language::Cuda),
_ => cannot_cache!("-x"),
};
}
Some(XClang(s)) => xclangs.push(s.clone()),
None => match arg {
Argument::Raw(ref val) => {
if input_arg.is_some() {
multiple_input = true;
}
input_arg = Some(val.clone());
}
Argument::UnknownFlag(_) => {}
_ => unreachable!(),
},
}
let args = match arg.get_data() {
Some(SplitDwarf)
| Some(ProfileGenerate)
| Some(TestCoverage)
| Some(Coverage)
| Some(DiagnosticsColor(_))
| Some(DiagnosticsColorFlag)
| Some(NoDiagnosticsColorFlag)
| Some(PassThrough(_))
| Some(PassThroughPath(_)) => &mut common_args,
Some(ExtraHashFile(path)) => {
extra_hash_files.push(path.clone());
&mut common_args
}
Some(PreprocessorArgumentFlag)
| Some(PreprocessorArgument(_))
| Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args,
Some(DepArgumentPath(_)) | Some(NeedDepTarget) => &mut dependency_args,
Some(DoCompilation) | Some(Language(_)) | Some(Output(_)) | Some(XClang(_))
| Some(DepTarget(_)) => continue,
Some(TooHardFlag) | Some(TooHard(_)) => unreachable!(),
None => match arg {
Argument::Raw(_) => continue,
Argument::UnknownFlag(_) => &mut common_args,
_ => unreachable!(),
},
};
// Normalize arguments: two-character flags are concatenated with their value
// ("-I foo" -> "-Ifoo", "-D FOO=bar" -> "-DFOO=bar"), while longer flags are
// separated from it ("-includefoo" -> "-include foo", "-idirafterbar" -> "-idirafter bar").
let norm = match arg.flag_str() {
Some(s) if s.len() == 2 => NormalizedDisposition::Concatenated,
_ => NormalizedDisposition::Separated,
};
args.extend(arg.normalize(norm).iter_os_strings());
}
let xclang_it = ExpandIncludeFile::new(cwd, &xclangs);
let mut follows_plugin_arg = false;
for arg in ArgsIter::new(xclang_it, (&ARGS[..], &clang::ARGS[..])) {
let arg = try_or_cannot_cache!(arg, "argument parse");
let args = match arg.get_data() {
Some(SplitDwarf)
| Some(ProfileGenerate)
| Some(TestCoverage)
| Some(Coverage)
| Some(DoCompilation)
| Some(Language(_))
| Some(Output(_))
| Some(TooHardFlag)
| Some(XClang(_))
| Some(TooHard(_)) => cannot_cache!(arg
.flag_str()
.unwrap_or("Can't handle complex arguments through clang",)),
None => match arg {
Argument::Raw(_) if follows_plugin_arg => &mut common_args,
Argument::Raw(_) => cannot_cache!("Can't handle Raw arguments with -Xclang"),
Argument::UnknownFlag(_) => {
cannot_cache!("Can't handle UnknownFlag arguments with -Xclang")
}
_ => unreachable!(),
},
Some(DiagnosticsColor(_))
| Some(DiagnosticsColorFlag)
| Some(NoDiagnosticsColorFlag)
| Some(PassThrough(_))
| Some(PassThroughPath(_)) => &mut common_args,
Some(ExtraHashFile(path)) => {
extra_hash_files.push(path.clone());
&mut common_args
}
Some(PreprocessorArgumentFlag)
| Some(PreprocessorArgument(_))
| Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args,
Some(DepTarget(_)) | Some(DepArgumentPath(_)) | Some(NeedDepTarget) => {
&mut dependency_args
}
};
follows_plugin_arg = match arg.flag_str() {
Some(s) => s == "-plugin-arg",
_ => false,
};
// Normalize arguments: two-character flags are concatenated with their value
// ("-I foo" -> "-Ifoo", "-D FOO=bar" -> "-DFOO=bar"), while longer flags are
// separated from it ("-includefoo" -> "-include foo", "-idirafterbar" -> "-idirafter bar").
let norm = match arg.flag_str() {
Some(s) if s.len() == 2 => NormalizedDisposition::Concatenated,
_ => NormalizedDisposition::Separated,
};
for arg in arg.normalize(norm).iter_os_strings() {
args.push("-Xclang".into());
args.push(arg)
}
}
// We only support compilation.
if !compilation {
return CompilerArguments::NotCompilation;
}
// Can't cache compilations with multiple inputs.
if multiple_input {
cannot_cache!("multiple input files");
}
let input = match input_arg {
Some(i) => i,
// We can't cache compilation without an input.
None => cannot_cache!("no input file"),
};
let language = match language {
None => {
let mut lang = Language::from_file_name(Path::new(&input));
if let (Some(Language::C), true) = (lang, plusplus) {
lang = Some(Language::Cxx);
}
lang
}
l => l,
};
let language = match language {
Some(l) => l,
None => cannot_cache!("unknown source language"),
};
let mut outputs = HashMap::new();
let output = match output_arg {
// We can't cache compilation that doesn't go to a file
None => PathBuf::from(Path::new(&input).with_extension("o").file_name().unwrap()),
Some(o) => o,
};
if split_dwarf {
let dwo = output.with_extension("dwo");
outputs.insert("dwo", dwo);
}
if outputs_gcno {
let gcno = output.with_extension("gcno");
outputs.insert("gcno", gcno);
profile_generate = true;
}
if need_explicit_dep_target {
dependency_args.push(dep_flag);
dependency_args.push(dep_target.unwrap_or_else(|| output.clone().into_os_string()));
}
outputs.insert("obj", output);
CompilerArguments::Ok(ParsedArguments {
input: input.into(),
language,
compilation_flag,
depfile: None,
outputs,
dependency_args,
preprocessor_args,
common_args,
extra_hash_files,
msvc_show_includes: false,
profile_generate,
color_mode,
})
}
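// Illustrative behaviour (mirroring the tests at the bottom of this file):
// ["-c", "foo.c", "-o", "foo.o"] parses to CompilerArguments::Ok with input "foo.c",
// language C and an "obj" output of "foo.o"; a link-only invocation yields
// NotCompilation, and response-file arguments ("@args") yield CannotCache.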
#[allow(clippy::too_many_arguments)]
pub fn preprocess<T>(
creator: &T,
executable: &Path,
parsed_args: &ParsedArguments,
cwd: &Path,
env_vars: &[(OsString, OsString)],
may_dist: bool,
kind: CCompilerKind,
rewrite_includes_only: bool,
) -> SFuture<process::Output>
where
T: CommandCreatorSync,
{
trace!("preprocess");
let language = match parsed_args.language {
Language::C => "c",
Language::Cxx => "c++",
Language::ObjectiveC => "objective-c",
Language::ObjectiveCxx => "objective-c++",
Language::Cuda => "cu",
};
let mut cmd = creator.clone().new_command_sync(executable);
cmd.arg("-x").arg(language).arg("-E");
// When performing distributed compilation, line number info is important for error
// reporting and for avoiding spurious compilation failures (e.g. a no-exceptions
// build failing because of exceptions transitively included from the stdlib), so
// -P is skipped. Line numbers also matter with -fprofile-generate, so -P is
// skipped there as well.
if !may_dist && !parsed_args.profile_generate {
cmd.arg("-P");
}
if rewrite_includes_only {
match kind {
CCompilerKind::Clang => {
cmd.arg("-frewrite-includes");
}
CCompilerKind::GCC => {
cmd.arg("-fdirectives-only");
}
_ => {}
}
}
cmd.arg(&parsed_args.input)
.args(&parsed_args.preprocessor_args)
.args(&parsed_args.dependency_args)
.args(&parsed_args.common_args)
.env_clear()
.envs(env_vars.iter().map(|&(ref k, ref v)| (k, v)))
.current_dir(cwd);
if log_enabled!(Trace) {
trace!("preprocess: {:?}", cmd);
}
Box::new(run_input_output(cmd, None))
}
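// Roughly, for a C input the command built above looks like:
//   <cc> -x c -E [-P] [-frewrite-includes | -fdirectives-only] <input> <preprocessor/dep/common args>
// where the bracketed flags are conditional, as implemented above.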
pub fn generate_compile_commands(
path_transformer: &mut dist::PathTransformer,
executable: &Path,
parsed_args: &ParsedArguments,
cwd: &Path,
env_vars: &[(OsString, OsString)],
kind: CCompilerKind,
rewrite_includes_only: bool,
) -> Result<(CompileCommand, Option<dist::CompileCommand>, Cacheable)> {
// Unused arguments
#[cfg(not(feature = "dist-client"))]
{
let _ = path_transformer;
let _ = kind;
let _ = rewrite_includes_only;
}
trace!("compile");
let out_file = match parsed_args.outputs.get("obj") {
Some(obj) => obj,
None => return Err(anyhow!("Missing object file output")),
};
// Pass the language explicitly as we might have gotten it from the
// command line.
let language = match parsed_args.language {
Language::C => "c",
Language::Cxx => "c++",
Language::ObjectiveC => "objective-c",
Language::ObjectiveCxx => "objective-c++",
Language::Cuda => "cu",
};
let mut arguments: Vec<OsString> = vec![
"-x".into(),
language.into(),
parsed_args.compilation_flag.clone(),
parsed_args.input.clone().into(),
"-o".into(),
out_file.into(),
];
arguments.extend(parsed_args.preprocessor_args.clone());
arguments.extend(parsed_args.common_args.clone());
let command = CompileCommand {
executable: executable.to_owned(),
arguments,
env_vars: env_vars.to_owned(),
cwd: cwd.to_owned(),
};
#[cfg(not(feature = "dist-client"))]
let dist_command = None;
#[cfg(feature = "dist-client")]
let dist_command = (|| {
// https://gcc.gnu.org/onlinedocs/gcc-4.9.0/gcc/Overall-Options.html
let mut language: String = match parsed_args.language {
Language::C => "c",
Language::Cxx => "c++",
Language::ObjectiveC => "objective-c",
Language::ObjectiveCxx => "objective-c++",
Language::Cuda => "cu",
}
.into();
if !rewrite_includes_only {
match parsed_args.language {
Language::C => language = "cpp-output".into(),
_ => language.push_str("-cpp-output"),
}
}
let mut arguments: Vec<String> = vec![
"-x".into(),
language,
parsed_args.compilation_flag.clone().into_string().ok()?,
path_transformer.as_dist(&parsed_args.input)?,
"-o".into(),
path_transformer.as_dist(out_file)?,
];
if let CCompilerKind::GCC = kind {
// From https://gcc.gnu.org/onlinedocs/gcc/Preprocessor-Options.html:
//
// -fdirectives-only
//
// [...]
//
// With -fpreprocessed, predefinition of command line and most
// builtin macros is disabled. Macros such as __LINE__, which
// are contextually dependent, are handled normally. This
// enables compilation of files previously preprocessed with -E
// -fdirectives-only.
//
// Which is exactly what we do :-)
if rewrite_includes_only {
arguments.push("-fdirectives-only".into());
}
arguments.push("-fpreprocessed".into());
}
arguments.extend(dist::osstrings_to_strings(&parsed_args.common_args)?);
Some(dist::CompileCommand {
executable: path_transformer.as_dist(&executable)?,
arguments,
env_vars: dist::osstring_tuples_to_strings(env_vars)?,
cwd: path_transformer.as_dist_abs(cwd)?,
})
})();
Ok((command, dist_command, Cacheable::Yes))
}
pub struct ExpandIncludeFile<'a> {
cwd: &'a Path,
stack: Vec<OsString>,
}
impl<'a> ExpandIncludeFile<'a> {
pub fn new(cwd: &'a Path, args: &[OsString]) -> Self {
ExpandIncludeFile {
stack: args.iter().rev().map(|a| a.to_owned()).collect(),
cwd,
}
}
}
impl<'a> Iterator for ExpandIncludeFile<'a> {
type Item = OsString;
fn next(&mut self) -> Option<OsString> {
loop {
let arg = match self.stack.pop() {
Some(arg) => arg,
None => return None,
};
let file = match arg.split_prefix("@") {
Some(arg) => self.cwd.join(&arg),
None => return Some(arg),
};
// According to gcc [1], @file means:
//
// Read command-line options from file. The options read are
// inserted in place of the original @file option. If file does
// not exist, or cannot be read, then the option will be
// treated literally, and not removed.
//
// Options in file are separated by whitespace. A
// whitespace character may be included in an option by
// surrounding the entire option in either single or double
// quotes. Any character (including a backslash) may be
// included by prefixing the character to be included with
// a backslash. The file may itself contain additional
// @file options; any such options will be processed
// recursively.
//
// So here we interpret any I/O errors as "just return this
// argument". Currently we don't implement handling of arguments
// with quotes, so if those are encountered we just pass the option
// through literally anyway.
//
// At this time we interpret all `@` arguments above as non
// cacheable, so if we fail to interpret this we'll just call the
// compiler anyway.
//
// [1]: https://gcc.gnu.org/onlinedocs/gcc/Overall-Options.html#Overall-Options
let mut contents = String::new();
let res = File::open(&file).and_then(|mut f| f.read_to_string(&mut contents));
if let Err(e) = res {
debug!("failed to read @-file `{}`: {}", file.display(), e);
return Some(arg);
}
if contents.contains('"') || contents.contains('\'') {
return Some(arg);
}
let new_args = contents.split_whitespace().collect::<Vec<_>>();
self.stack.extend(new_args.iter().rev().map(|s| s.into()));
}
}
}
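// Illustrative expansion (hypothetical file contents): given "@rsp" where the file
// `rsp` contains "-c foo.c -o foo.o", the iterator yields ["-c", "foo.c", "-o", "foo.o"];
// if `rsp` cannot be read or contains quote characters, the literal "@rsp" argument is
// yielded unchanged.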
#[cfg(test)]
mod test {
use std::fs::File;
use std::io::Write;
use super::*;
use crate::compiler::*;
use crate::mock_command::*;
use crate::test::utils::*;
use futures::Future;
fn parse_arguments_(
arguments: Vec<String>,
plusplus: bool,
) -> CompilerArguments<ParsedArguments> {
let args = arguments.iter().map(OsString::from).collect::<Vec<_>>();
parse_arguments(&args, ".".as_ref(), &ARGS[..], plusplus)
}
#[test]
fn test_parse_arguments_simple() {
let args = stringvec!["-c", "foo.c", "-o", "foo.o"];
let ParsedArguments {
input,
language,
compilation_flag,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_eq!(Some("-c"), compilation_flag.to_str());
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert!(common_args.is_empty());
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_default_name() {
let args = stringvec!["-c", "foo.c"];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert!(common_args.is_empty());
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_default_outputdir() {
let args = stringvec!["-c", "/tmp/foo.c"];
let ParsedArguments { outputs, .. } = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
}
#[test]
fn test_parse_arguments_split_dwarf() {
let args = stringvec!["-gsplit-dwarf", "-c", "foo.cpp", "-o", "foo.o"];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cpp"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(
outputs,
("obj", PathBuf::from("foo.o")),
("dwo", PathBuf::from("foo.dwo"))
);
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-gsplit-dwarf"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_linker_options() {
let args = stringvec![
// is basically the same as `-z deps`
"-Wl,--unresolved-symbols=report-all",
"-z",
"call-nop=suffix-nop",
"-z",
"deps",
"-c",
"foo.c",
"-o",
"foo.o"
];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert_eq!(3, common_args.len());
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_coverage_outputs_gcno() {
let args = stringvec!["--coverage", "-c", "foo.cpp", "-o", "foo.o"];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
profile_generate,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cpp"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(
outputs,
("obj", PathBuf::from("foo.o")),
("gcno", PathBuf::from("foo.gcno"))
);
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["--coverage"], common_args);
assert!(!msvc_show_includes);
assert!(profile_generate);
}
#[test]
fn test_parse_arguments_test_coverage_outputs_gcno() {
let args = stringvec!["-ftest-coverage", "-c", "foo.cpp", "-o", "foo.o"];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
profile_generate,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cpp"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(
outputs,
("obj", PathBuf::from("foo.o")),
("gcno", PathBuf::from("foo.gcno"))
);
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-ftest-coverage"], common_args);
assert!(!msvc_show_includes);
assert!(profile_generate);
}
<|fim▁hole|> input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
profile_generate,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cpp"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-fprofile-generate"], common_args);
assert!(!msvc_show_includes);
assert!(profile_generate);
}
#[test]
fn test_parse_arguments_extra() {
let args = stringvec!["-c", "foo.cc", "-fabc", "-o", "foo.o", "-mxyz"];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cc"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-fabc", "-mxyz"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_values() {
let args = stringvec![
"-c", "foo.cxx", "-fabc", "-I", "include", "-o", "foo.o", "-include", "file"
];
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.cxx"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(ovec!["-Iinclude", "-include", "file"], preprocessor_args);
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_preprocessor_args() {
let args = stringvec![
"-c",
"foo.c",
"-fabc",
"-MF",
"file",
"-o",
"foo.o",
"-MQ",
"abc",
"-nostdinc"
];
let ParsedArguments {
input,
language,
outputs,
dependency_args,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(ovec!["-MF", "file"], dependency_args);
assert_eq!(ovec!["-nostdinc"], preprocessor_args);
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_explicit_dep_target() {
let args =
stringvec!["-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "file", "-o", "foo.o"];
let ParsedArguments {
input,
language,
outputs,
dependency_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(ovec!["-MF", "file"], dependency_args);
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_explicit_dep_target_needed() {
let args = stringvec![
"-c", "foo.c", "-MT", "depfile", "-fabc", "-MF", "file", "-o", "foo.o", "-MD"
];
let ParsedArguments {
input,
language,
outputs,
dependency_args,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(
ovec!["-MF", "file", "-MD", "-MT", "depfile"],
dependency_args
);
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_explicit_mq_dep_target_needed() {
let args = stringvec![
"-c", "foo.c", "-MQ", "depfile", "-fabc", "-MF", "file", "-o", "foo.o", "-MD"
];
let ParsedArguments {
input,
language,
outputs,
dependency_args,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(
ovec!["-MF", "file", "-MD", "-MQ", "depfile"],
dependency_args
);
assert!(preprocessor_args.is_empty());
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_diagnostics_color() {
fn get_color_mode(color_flag: &str) -> ColorMode {
let args = stringvec!["-c", "foo.c", color_flag];
match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args.color_mode,
o => panic!("Got unexpected parse result: {:?}", o),
}
}
assert_eq!(get_color_mode("-fdiagnostics-color=always"), ColorMode::On);
assert_eq!(get_color_mode("-fdiagnostics-color=never"), ColorMode::Off);
assert_eq!(get_color_mode("-fdiagnostics-color=auto"), ColorMode::Auto);
assert_eq!(get_color_mode("-fno-diagnostics-color"), ColorMode::Off);
assert_eq!(get_color_mode("-fdiagnostics-color"), ColorMode::On);
}
#[test]
fn color_mode_preprocess() {
let args = stringvec!["-c", "foo.c", "-fdiagnostics-color"];
let args = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert!(args.common_args.contains(&"-fdiagnostics-color".into()));
}
#[test]
fn test_parse_arguments_dep_target_needed() {
let args = stringvec!["-c", "foo.c", "-fabc", "-MF", "file", "-o", "foo.o", "-MD"];
let ParsedArguments {
input,
language,
outputs,
dependency_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert_eq!(ovec!["-MF", "file", "-MD", "-MT", "foo.o"], dependency_args);
assert_eq!(ovec!["-fabc"], common_args);
assert!(!msvc_show_includes);
}
#[test]
fn test_parse_arguments_empty_args() {
assert_eq!(
CompilerArguments::NotCompilation,
parse_arguments_(vec!(), false)
);
}
#[test]
fn test_parse_arguments_not_compile() {
assert_eq!(
CompilerArguments::NotCompilation,
parse_arguments_(stringvec!["-o", "foo"], false)
);
}
#[test]
fn test_parse_arguments_too_many_inputs() {
assert_eq!(
CompilerArguments::CannotCache("multiple input files", None),
parse_arguments_(stringvec!["-c", "foo.c", "-o", "foo.o", "bar.c"], false)
);
}
#[test]
fn test_parse_arguments_link() {
assert_eq!(
CompilerArguments::NotCompilation,
parse_arguments_(
stringvec!["-shared", "foo.o", "-o", "foo.so", "bar.o"],
false
)
);
}
#[test]
fn test_parse_arguments_pgo() {
assert_eq!(
CompilerArguments::CannotCache("-fprofile-use", None),
parse_arguments_(
stringvec!["-c", "foo.c", "-fprofile-use", "-o", "foo.o"],
false
)
);
assert_eq!(
CompilerArguments::CannotCache("-fprofile-use", None),
parse_arguments_(
stringvec!["-c", "foo.c", "-fprofile-use=file", "-o", "foo.o"],
false
)
);
}
#[test]
fn test_parse_arguments_response_file() {
assert_eq!(
CompilerArguments::CannotCache("@", None),
parse_arguments_(stringvec!["-c", "foo.c", "@foo", "-o", "foo.o"], false)
);
assert_eq!(
CompilerArguments::CannotCache("@", None),
parse_arguments_(stringvec!["-c", "foo.c", "-o", "@foo"], false)
);
}
#[test]
fn at_signs() {
let td = tempfile::Builder::new()
.prefix("sccache")
.tempdir()
.unwrap();
File::create(td.path().join("foo"))
.unwrap()
.write_all(
b"\
-c foo.c -o foo.o\
",
)
.unwrap();
let arg = format!("@{}", td.path().join("foo").display());
let ParsedArguments {
input,
language,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(vec![arg], false) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::C, language);
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert!(common_args.is_empty());
assert!(!msvc_show_includes);
}
#[test]
fn test_compile_simple() {
let creator = new_creator();
let f = TestFixture::new();
let parsed_args = ParsedArguments {
input: "foo.c".into(),
language: Language::C,
compilation_flag: "-c".into(),
depfile: None,
outputs: vec![("obj", "foo.o".into())].into_iter().collect(),
dependency_args: vec![],
preprocessor_args: vec![],
common_args: vec![],
extra_hash_files: vec![],
msvc_show_includes: false,
profile_generate: false,
color_mode: ColorMode::Auto,
};
let compiler = &f.bins[0];
// Compiler invocation.
next_command(&creator, Ok(MockChild::new(exit_status(0), "", "")));
let mut path_transformer = dist::PathTransformer::default();
let (command, dist_command, cacheable) = generate_compile_commands(
&mut path_transformer,
&compiler,
&parsed_args,
f.tempdir.path(),
&[],
CCompilerKind::GCC,
false,
)
.unwrap();
#[cfg(feature = "dist-client")]
assert!(dist_command.is_some());
#[cfg(not(feature = "dist-client"))]
assert!(dist_command.is_none());
let _ = command.execute(&creator).wait();
assert_eq!(Cacheable::Yes, cacheable);
// Ensure that we ran all processes.
assert_eq!(0, creator.lock().unwrap().children.len());
}
#[test]
fn test_parse_arguments_plusplus() {
let args = stringvec!["-c", "foo.c", "-o", "foo.o"];
let ParsedArguments {
input,
language,
compilation_flag,
outputs,
preprocessor_args,
msvc_show_includes,
common_args,
..
} = match parse_arguments_(args, true) {
CompilerArguments::Ok(args) => args,
o => panic!("Got unexpected parse result: {:?}", o),
};
assert_eq!(Some("foo.c"), input.to_str());
assert_eq!(Language::Cxx, language);
assert_eq!(Some("-c"), compilation_flag.to_str());
assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o")));
assert!(preprocessor_args.is_empty());
assert!(common_args.is_empty());
assert!(!msvc_show_includes);
}
}<|fim▁end|> | #[test]
fn test_parse_arguments_profile_generate() {
let args = stringvec!["-fprofile-generate", "-c", "foo.cpp", "-o", "foo.o"];
let ParsedArguments { |
<|file_name|>configs.js<|end_file_name|><|fim▁begin|>import "./main";
import "core/utils";
import "bootstrap/js/dist/tab";
import dayjs from "dayjs";
import advancedFormat from "dayjs/plugin/advancedFormat";
import utc from "dayjs/plugin/utc";
import timezone from "dayjs/plugin/timezone";
import timezones from "../timezones";
import CTFd from "core/CTFd";
import { default as helpers } from "core/helpers";
import $ from "jquery";
import { ezQuery, ezProgressBar, ezAlert } from "core/ezq";
import CodeMirror from "codemirror";
import "codemirror/mode/htmlmixed/htmlmixed.js";
import Vue from "vue/dist/vue.esm.browser";
import FieldList from "../components/configs/fields/FieldList.vue";
dayjs.extend(advancedFormat);
dayjs.extend(utc);
dayjs.extend(timezone);
function loadTimestamp(place, timestamp) {
if (typeof timestamp == "string") {
timestamp = parseInt(timestamp, 10) * 1000;
}
const d = dayjs(timestamp);
$("#" + place + "-month").val(d.month() + 1); // Months are zero indexed (https://day.js.org/docs/en/get-set/month)
$("#" + place + "-day").val(d.date());
$("#" + place + "-year").val(d.year());
$("#" + place + "-hour").val(d.hour());
$("#" + place + "-minute").val(d.minute());
loadDateValues(place);
}
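// Example: loadTimestamp("start", "1735689600") converts the epoch seconds to
// milliseconds, fills the #start-* date/time inputs from the resulting dayjs
// object, and recomputes the derived fields via loadDateValues.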
function loadDateValues(place) {
const month = $("#" + place + "-month").val();
const day = $("#" + place + "-day").val();
const year = $("#" + place + "-year").val();
const hour = $("#" + place + "-hour").val();
const minute = $("#" + place + "-minute").val();
const timezone_string = $("#" + place + "-timezone").val();
const utc = convertDateToMoment(month, day, year, hour, minute);
if (utc.unix() && month && day && year && hour && minute) {
$("#" + place).val(utc.unix());
$("#" + place + "-local").val(
utc.format("dddd, MMMM Do YYYY, h:mm:ss a z (zzz)")
);
$("#" + place + "-zonetime").val(
utc.tz(timezone_string).format("dddd, MMMM Do YYYY, h:mm:ss a z (zzz)")
);
} else {
$("#" + place).val("");
$("#" + place + "-local").val("");
$("#" + place + "-zonetime").val("");
}
}
function convertDateToMoment(month, day, year, hour, minute) {
let month_num = month.toString();
if (month_num.length == 1) {
month_num = "0" + month_num;
}
let day_str = day.toString();
if (day_str.length == 1) {
day_str = "0" + day_str;
}
let hour_str = hour.toString();
if (hour_str.length == 1) {
hour_str = "0" + hour_str;
}
let min_str = minute.toString();
if (min_str.length == 1) {
min_str = "0" + min_str;
}
// Build a "YYYY-MM-DD HH:mm:00" string, e.g. "2013-02-08 14:00:00"
const date_string =
year.toString() +
"-" +
month_num +
"-" +
day_str +
" " +
hour_str +
":" +
min_str +
":00";
return dayjs(date_string);
}
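// Example: convertDateToMoment(2, 8, 2013, 14, 5) zero-pads each part and returns
// dayjs("2013-02-08 14:05:00").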
function updateConfigs(event) {
event.preventDefault();
const obj = $(this).serializeJSON();
const params = {};
if (obj.mail_useauth === false) {
obj.mail_username = null;
obj.mail_password = null;
} else {
if (obj.mail_username === "") {
delete obj.mail_username;
}
if (obj.mail_password === "") {
delete obj.mail_password;
}
}
Object.keys(obj).forEach(function(x) {
if (obj[x] === "true") {
params[x] = true;
} else if (obj[x] === "false") {
params[x] = false;
} else {
params[x] = obj[x];
}
});
CTFd.api.patch_config_list({}, params).then(function(_response) {
if (_response.success) {
window.location.reload();
} else {
let errors = _response.errors.value.join("\n");
ezAlert({
title: "Error!",
body: errors,
button: "Okay"
});
}
});
}
function uploadLogo(event) {
event.preventDefault();
let form = event.target;
helpers.files.upload(form, {}, function(response) {
const f = response.data[0];
const params = {
value: f.location
};
CTFd.fetch("/api/v1/configs/ctf_logo", {
method: "PATCH",
body: JSON.stringify(params)
})
.then(function(response) {
return response.json();
})
.then(function(response) {
if (response.success) {
window.location.reload();
} else {
ezAlert({
title: "Error!",
body: "Logo uploading failed!",
button: "Okay"
});
}
});
});
}
function switchUserMode(event) {
event.preventDefault();
if (
confirm(
"Are you sure you'd like to switch user modes?\n\nAll user submissions, awards, unlocks, and tracking will be deleted!"
)
) {
let formData = new FormData();
formData.append("submissions", true);
formData.append("nonce", CTFd.config.csrfNonce);
fetch(CTFd.config.urlRoot + "/admin/reset", {
method: "POST",
credentials: "same-origin",
body: formData
});
// Bind `this` so that we can reuse the updateConfigs function
let binded = updateConfigs.bind(this);
binded(event);
}
}
function removeLogo() {
ezQuery({
title: "Remove logo",
body: "Are you sure you'd like to remove the CTF logo?",
success: function() {
const params = {
value: null
};
CTFd.api
.patch_config({ configKey: "ctf_logo" }, params)
.then(_response => {
window.location.reload();
});
}
});
}
function smallIconUpload(event) {
event.preventDefault();
let form = event.target;
helpers.files.upload(form, {}, function(response) {
const f = response.data[0];
const params = {
value: f.location
};
CTFd.fetch("/api/v1/configs/ctf_small_icon", {
method: "PATCH",
body: JSON.stringify(params)
})
.then(function(response) {
return response.json();
})
.then(function(response) {
if (response.success) {
window.location.reload();
} else {
ezAlert({
title: "Error!",
body: "Icon uploading failed!",
button: "Okay"
});
}
});
});
}
<|fim▁hole|> success: function() {
const params = {
value: null
};
CTFd.api
.patch_config({ configKey: "ctf_small_icon" }, params)
.then(_response => {
window.location.reload();
});
}
});
}
function importCSV(event) {
event.preventDefault();
let csv_file = document.getElementById("import-csv-file").files[0];
let csv_type = document.getElementById("import-csv-type").value;
let form_data = new FormData();
form_data.append("csv_file", csv_file);
form_data.append("csv_type", csv_type);
form_data.append("nonce", CTFd.config.csrfNonce);
let pg = ezProgressBar({
width: 0,
title: "Upload Progress"
});
$.ajax({
url: CTFd.config.urlRoot + "/admin/import/csv",
type: "POST",
data: form_data,
processData: false,
contentType: false,
statusCode: {
500: function(resp) {
// Normalize errors
let errors = JSON.parse(resp.responseText);
let errorText = "";
errors.forEach(element => {
errorText += `Line ${element[0]}: ${JSON.stringify(element[1])}\n`;
});
// Show errors
alert(errorText);
// Hide progress modal if its there
pg = ezProgressBar({
target: pg,
width: 100
});
setTimeout(function() {
pg.modal("hide");
}, 500);
}
},
xhr: function() {
let xhr = $.ajaxSettings.xhr();
xhr.upload.onprogress = function(e) {
if (e.lengthComputable) {
let width = (e.loaded / e.total) * 100;
pg = ezProgressBar({
target: pg,
width: width
});
}
};
return xhr;
},
success: function(_data) {
pg = ezProgressBar({
target: pg,
width: 100
});
setTimeout(function() {
pg.modal("hide");
}, 500);
setTimeout(function() {
window.location.reload();
}, 700);
}
});
}
function importConfig(event) {
event.preventDefault();
let import_file = document.getElementById("import-file").files[0];
let form_data = new FormData();
form_data.append("backup", import_file);
form_data.append("nonce", CTFd.config.csrfNonce);
let pg = ezProgressBar({
width: 0,
title: "Upload Progress"
});
$.ajax({
url: CTFd.config.urlRoot + "/admin/import",
type: "POST",
data: form_data,
processData: false,
contentType: false,
statusCode: {
500: function(resp) {
alert(resp.responseText);
}
},
xhr: function() {
let xhr = $.ajaxSettings.xhr();
xhr.upload.onprogress = function(e) {
if (e.lengthComputable) {
let width = (e.loaded / e.total) * 100;
pg = ezProgressBar({
target: pg,
width: width
});
}
};
return xhr;
},
success: function(_data) {
pg = ezProgressBar({
target: pg,
width: 100
});
setTimeout(function() {
pg.modal("hide");
}, 500);
setTimeout(function() {
window.location.reload();
}, 700);
}
});
}
function exportConfig(event) {
event.preventDefault();
window.location.href = $(this).attr("href");
}
function insertTimezones(target) {
let current = $("<option>").text(dayjs.tz.guess());
$(target).append(current);
let tz_names = timezones;
for (let i = 0; i < tz_names.length; i++) {
let tz = $("<option>").text(tz_names[i]);
$(target).append(tz);
}
}
$(() => {
const theme_header_editor = CodeMirror.fromTextArea(
document.getElementById("theme-header"),
{
lineNumbers: true,
lineWrapping: true,
mode: "htmlmixed",
htmlMode: true
}
);
const theme_footer_editor = CodeMirror.fromTextArea(
document.getElementById("theme-footer"),
{
lineNumbers: true,
lineWrapping: true,
mode: "htmlmixed",
htmlMode: true
}
);
const theme_settings_editor = CodeMirror.fromTextArea(
document.getElementById("theme-settings"),
{
lineNumbers: true,
lineWrapping: true,
readOnly: true,
mode: { name: "javascript", json: true }
}
);
// Handle refreshing the CodeMirror instances when switching tabs.
// Better than the autorefresh approach because there's no flicker.
$("a[href='#theme']").on("shown.bs.tab", function(_e) {
theme_header_editor.refresh();
theme_footer_editor.refresh();
theme_settings_editor.refresh();
});
$(
"a[href='#legal'], a[href='#tos-config'], a[href='#privacy-policy-config']"
).on("shown.bs.tab", function(_e) {
$("#tos-config .CodeMirror").each(function(i, el) {
el.CodeMirror.refresh();
});
$("#privacy-policy-config .CodeMirror").each(function(i, el) {
el.CodeMirror.refresh();
});
});
$("#theme-settings-modal form").submit(function(e) {
e.preventDefault();
theme_settings_editor
.getDoc()
.setValue(JSON.stringify($(this).serializeJSON(), null, 2));
$("#theme-settings-modal").modal("hide");
});
$("#theme-settings-button").click(function() {
let form = $("#theme-settings-modal form");
let data;
// Ignore invalid JSON data
try {
data = JSON.parse(theme_settings_editor.getValue());
} catch (e) {
data = {};
}
$.each(data, function(key, value) {
var ctrl = form.find(`[name='${key}']`);
switch (ctrl.prop("type")) {
case "radio":
case "checkbox":
ctrl.each(function() {
if ($(this).attr("value") == value) {
$(this).attr("checked", value);
}
});
break;
default:
ctrl.val(value);
}
});
$("#theme-settings-modal").modal();
});
insertTimezones($("#start-timezone"));
insertTimezones($("#end-timezone"));
insertTimezones($("#freeze-timezone"));
$(".config-section > form:not(.form-upload, .custom-config-form)").submit(
updateConfigs
);
$("#logo-upload").submit(uploadLogo);
$("#user-mode-form").submit(switchUserMode);
$("#remove-logo").click(removeLogo);
$("#ctf-small-icon-upload").submit(smallIconUpload);
$("#remove-small-icon").click(removeSmallIcon);
$("#export-button").click(exportConfig);
$("#import-button").click(importConfig);
$("#import-csv-form").submit(importCSV);
$("#config-color-update").click(function() {
const hex_code = $("#config-color-picker").val();
const user_css = theme_header_editor.getValue();
let new_css;
if (user_css.length) {
let css_vars = `theme-color: ${hex_code};`;
new_css = user_css.replace(/theme-color: (.*);/, css_vars);
} else {
new_css =
`<style id="theme-color">\n` +
`:root {--theme-color: ${hex_code};}\n` +
`.navbar{background-color: var(--theme-color) !important;}\n` +
`.jumbotron{background-color: var(--theme-color) !important;}\n` +
`</style>\n`;
}
theme_header_editor.getDoc().setValue(new_css);
});
$(".start-date").change(function() {
loadDateValues("start");
});
$(".end-date").change(function() {
loadDateValues("end");
});
$(".freeze-date").change(function() {
loadDateValues("freeze");
});
const start = $("#start").val();
const end = $("#end").val();
const freeze = $("#freeze").val();
if (start) {
loadTimestamp("start", start);
}
if (end) {
loadTimestamp("end", end);
}
if (freeze) {
loadTimestamp("freeze", freeze);
}
// Toggle username and password based on stored value
$("#mail_useauth")
.change(function() {
$("#mail_username_password").toggle(this.checked);
})
.change();
// Insert FieldList element for users
const fieldList = Vue.extend(FieldList);
let userVueContainer = document.createElement("div");
document.querySelector("#user-field-list").appendChild(userVueContainer);
new fieldList({
propsData: {
type: "user"
}
}).$mount(userVueContainer);
// Insert FieldList element for teams
let teamVueContainer = document.createElement("div");
document.querySelector("#team-field-list").appendChild(teamVueContainer);
new fieldList({
propsData: {
type: "team"
}
}).$mount(teamVueContainer);
});<|fim▁end|> | function removeSmallIcon() {
ezQuery({
title: "Remove logo",
body: "Are you sure you'd like to remove the small site icon?", |
<|file_name|>qgis-sample-QgsColorWheel.py<|end_file_name|><|fim▁begin|># coding: utf-8
from qgis.gui import QgsColorWheel
color_wheel = QgsColorWheel()
def on_color_wheel_changed(color):
print(color)<|fim▁hole|>color_wheel.show()<|fim▁end|> |
color_wheel.colorChanged.connect(on_color_wheel_changed)
|
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>#
# Copyright (C) 2014 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Forms for PortAdmin"""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Layout, Row, Column, Submit
class SearchForm(forms.Form):
"""Form for searching for ip-devices and interfaces"""
query = forms.CharField(
label='',<|fim▁hole|> super(SearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_action = 'portadmin-index'
self.helper.form_method = 'GET'
self.helper.layout = Layout(
Row(
Column('query', css_class='medium-9'),
Column(Submit('submit', 'Search', css_class='postfix'),
css_class='medium-3'),
css_class='collapse'
)
)<|fim▁end|> | widget=forms.TextInput(
attrs={'placeholder': 'Search for ip device or interface'}))
def __init__(self, *args, **kwargs): |
<|file_name|>WithEditorUser.java<|end_file_name|><|fim▁begin|>/*
* Copyright © 2016 - 2017 Dominik Szalai ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cz.muni.fi.editor.support;<|fim▁hole|>
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import org.springframework.core.annotation.AliasFor;
import org.springframework.security.test.context.support.WithSecurityContext;
/**
* @author Dominik Szalai - emptulik at gmail.com on 10.8.2016.
*/
@Target({ElementType.METHOD, ElementType.TYPE})
@Retention(RetentionPolicy.RUNTIME)
@Inherited
@Documented
@WithSecurityContext(
factory = TestSecurityContextFactory.class
)
public @interface WithEditorUser {
// the owner is always user with ID 1
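    // Illustrative usage: @WithEditorUser(2L) on a test runs it under a security
    // context for user id 2, built by TestSecurityContextFactory (assumed behaviour).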
@AliasFor("value")
long id() default 1L;
@AliasFor("id")
long value() default 1L;
boolean mock() default false;
}<|fim▁end|> | |
<|file_name|>press_exit.rs<|end_file_name|><|fim▁begin|>use poke_a_mango::ops::{Difficulty, GameState, Leader};
use poke_a_mango::ops::state;
#[test]
fn from_main_menu() {
let mut s = GameState::MainMenu;
state::press_exit(&mut s);
assert_eq!(s, GameState::Exit);
}
#[test]
fn from_choose_difficulty() {
let mut s = GameState::ChooseDifficulty;
state::press_exit(&mut s);
assert_eq!(s, GameState::ChooseDifficulty);
}
#[test]
fn from_playing() {
let state = GameState::Playing {
difficulty: Difficulty::Easy,
score: 101,
fruit: None,
};<|fim▁hole|> assert_eq!(s, state);
}
#[test]
fn from_choose_game_over() {
let state = GameState::GameOver {
difficulty: Difficulty::Easy,
score: 51,
name: "наб".to_string(),
};
let mut s = state.clone();
state::press_exit(&mut s);
assert_eq!(s, state);
}
#[test]
fn from_choose_game_ended() {
let state = GameState::GameEnded {
score: 25,
name: "наб".to_string(),
};
let mut s = state.clone();
state::press_exit(&mut s);
assert_eq!(s, state);
}
#[test]
fn from_choose_load_leaderboard() {
let mut s = GameState::LoadLeaderboard;
state::press_exit(&mut s);
assert_eq!(s, GameState::LoadLeaderboard);
}
#[test]
fn from_choose_display_leaderboard() {
let state = GameState::DisplayLeaderboard(vec![Leader::now("наб".to_string(), 25)]);
let mut s = state.clone();
state::press_exit(&mut s);
assert_eq!(s, state);
}
#[test]
fn from_choose_exit() {
let mut s = GameState::Exit;
state::press_exit(&mut s);
assert_eq!(s, GameState::Exit);
}<|fim▁end|> |
let mut s = state.clone();
state::press_exit(&mut s); |
<|file_name|>dutch.py<|end_file_name|><|fim▁begin|>from .features import Dictionary, RegexMatches, Stemmed, Stopwords
name = "dutch"
try:
import enchant
dictionary = enchant.Dict("nl")
except enchant.errors.DictNotFoundError:
raise ImportError("No enchant-compatible dictionary found for 'nl'. " +
"Consider installing 'myspell-nl'.")
dictionary = Dictionary(name + ".dictionary", dictionary.check)
"""
:class:`~revscoring.languages.features.Dictionary` features via
:class:`enchant.Dict` "nl". Provided by `myspell-nl`
"""
try:
from nltk.corpus import stopwords as nltk_stopwords
stopwords = set(nltk_stopwords.words('dutch'))
except LookupError:
raise ImportError("Could not load stopwords for {0}. ".format(__name__) +
"You may need to install the nltk 'stopwords' " +
"corpora. See http://www.nltk.org/data.html")
stopwords = Stopwords(name + ".stopwords", stopwords)
"""
:class:`~revscoring.languages.features.Stopwords` features provided by
:func:`nltk.corpus.stopwords` "dutch"
"""
try:
from nltk.stem.snowball import SnowballStemmer
stemmer = SnowballStemmer("dutch")
except ValueError:
raise ImportError("Could not load stemmer for {0}. ".format(__name__))
stemmed = Stemmed(name + ".stemmed", stemmer.stem)<|fim▁hole|>
badword_regexes = [
r"aars",
r"an(aal|us)\w*",
r"balhaar",
r"drol(len)?",
r"fack(en|ing|s)?", "facking",
r"flikkers?",
r"focking",
r"ge(ile?|lul)",
r"geneukt",
r"hoer(en?)?",
r"homos?",
r"kaka?",
r"kak(hoofd|ken)",
r"k[ae]nker",
r"klootzak(ken)?",
r"klote",
r"kont(gat|je)?",
r"pedo",
r"penis(sen)?",
r"peop",
r"piemels?",
r"pijpen",
r"pik",
r"pimel",
r"pipi",
r"poep(chinees?|en|hoofd)?",
r"poep(ie|je|sex|te?)s?",
r"porno?",
r"neuke?",
r"neuken(de)?",
r"neukt(en?)?",
r"stron(d|t)",
r"suck(s|t)?",
r"zuigt",
r"sukkels?",
r"ter(ing|ten)", "tetten",
r"tieten",
r"vagina",
r"verekte",
r"verkracht",
r"dikzak",
r"dildo",
r"mon?g(olen|ool)?", "mooiboy",
r"negers?",
r"shit",
r"sperma",
r"kut(jes?)?",
r"stelletje",
r"losers?",
r"lul(len)?",
r"reet",
r"scheet", "scheten", r"schijt",
r"diaree",
r"slet",
r"lekkerding",
r"likken"
]
badwords = RegexMatches(name + ".badwords", badword_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
badword detecting regexes.
"""
informal_regexes = [
r"aap(jes)?",
r"banaan",
r"bent",
r"boe(it)?",
r"doei"
r"dombo",
r"domme",
r"eigelijk",
r"godverdomme",
r"groetjes",
r"gwn",
r"hoi",
r"hal+o+",
r"heb",
r"hee+[jyl]", r"heee+?l",
r"houd?",
r"(?:hoi+)+",
r"hoor",
r"izan",
r"jij",
r"jou",
r"jullie",
r"kaas",
r"klopt",
r"kots",
r"kusjes",
r"le?kke?re?",
r"maarja",
r"mama",
r"nou",
r"oma",
r"ofzo",
r"oke",
r"sexy?",
r"snap",
r"stink(en|t)",
r"stoer",
r"swag",
r"swek",
r"vies", "vieze",
r"vind",
r"vuile",
r"xxx",
r"yeah",
r"zielig",
r"zooi",
r"yolo",
r"zeg"
]
informals = RegexMatches(name + ".informals", informal_regexes)
"""
:class:`~revscoring.languages.features.RegexMatches` features via a list of
informal word detecting regexes.
"""<|fim▁end|> | """
:class:`~revscoring.languages.features.Stemmed` word features via
:class:`nltk.stem.snowball.SnowballStemmer` "dutch"
""" |
<|file_name|>json.go<|end_file_name|><|fim▁begin|>// Package json implements a JSON handler.
package json
import (
j "encoding/json"
"io"
"os"
"sync"
"github.com/apex/log"
)<|fim▁hole|>
// Handler implementation.
type Handler struct {
mu sync.Mutex
enc *j.Encoder
}
// New handler.
func New(w io.Writer) *Handler {
return &Handler{
enc: j.NewEncoder(w),
}
}
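// Example (illustrative): log.SetHandler(json.New(os.Stdout)) routes entries
// through HandleLog, writing one JSON object per log entry.
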
// HandleLog implements log.Handler.
func (h *Handler) HandleLog(e *log.Entry) error {
h.mu.Lock()
defer h.mu.Unlock()
return h.enc.Encode(e)
}<|fim▁end|> |
// Default handler outputting to stderr.
var Default = New(os.Stderr) |
<|file_name|>item.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import itertools
import json
import erpnext
import frappe
import copy
from erpnext.controllers.item_variant import (ItemVariantExistsError,
copy_attributes_to_variant, get_variant, make_variant_item_code, validate_item_variant_attributes)
from erpnext.setup.doctype.item_group.item_group import (get_parent_item_groups, invalidate_cache_for)
from frappe import _, msgprint
from frappe.utils import (cint, cstr, flt, formatdate, get_timestamp, getdate,
now_datetime, random_string, strip)
from frappe.utils.html_utils import clean_html
from frappe.website.doctype.website_slideshow.website_slideshow import \
get_slideshow
from frappe.website.render import clear_cache
from frappe.website.website_generator import WebsiteGenerator
from six import iteritems
class DuplicateReorderRows(frappe.ValidationError):
pass
class StockExistsForTemplate(frappe.ValidationError):
pass
class InvalidBarcode(frappe.ValidationError):
pass
class Item(WebsiteGenerator):
website = frappe._dict(
page_title_field="item_name",
condition_field="show_in_website",
template="templates/generators/item.html",
no_cache=1
)
def onload(self):
super(Item, self).onload()
self.set_onload('stock_exists', self.stock_ledger_created())
self.set_asset_naming_series()
def set_asset_naming_series(self):
if not hasattr(self, '_asset_naming_series'):
from erpnext.assets.doctype.asset.asset import get_asset_naming_series
self._asset_naming_series = get_asset_naming_series()
self.set_onload('asset_naming_series', self._asset_naming_series)
def autoname(self):
if frappe.db.get_default("item_naming_by") == "Naming Series":
if self.variant_of:
if not self.item_code:
template_item_name = frappe.db.get_value("Item", self.variant_of, "item_name")
self.item_code = make_variant_item_code(self.variant_of, template_item_name, self)
else:
from frappe.model.naming import set_name_by_naming_series
set_name_by_naming_series(self)
self.item_code = self.name
self.item_code = strip(self.item_code)
self.name = self.item_code
def before_insert(self):
if not self.description:
self.description = self.item_name
# if self.is_sales_item and not self.get('is_item_from_hub'):
# self.publish_in_hub = 1
def after_insert(self):
'''set opening stock and item price'''
if self.standard_rate:
for default in self.item_defaults:
self.add_price(default.default_price_list)
if self.opening_stock:
self.set_opening_stock()
def validate(self):
self.get_doc_before_save()
super(Item, self).validate()
if not self.item_name:
self.item_name = self.item_code
if not self.description:
self.description = self.item_name
self.validate_uom()
self.validate_description()
self.add_default_uom_in_conversion_factor_table()
self.validate_conversion_factor()
self.validate_item_type()
self.check_for_active_boms()
self.fill_customer_code()
self.check_item_tax()
self.validate_barcode()
self.validate_warehouse_for_reorder()
self.update_bom_item_desc()
self.synced_with_hub = 0
self.validate_has_variants()
self.validate_stock_exists_for_template_item()
self.validate_attributes()
self.validate_variant_attributes()
self.validate_variant_based_on_change()
self.validate_website_image()
self.make_thumbnail()
self.validate_fixed_asset()
self.validate_retain_sample()
self.validate_uom_conversion_factor()
self.validate_item_defaults()
self.update_defaults_from_item_group()
self.validate_stock_for_has_batch_and_has_serial()
if not self.get("__islocal"):
self.old_item_group = frappe.db.get_value(self.doctype, self.name, "item_group")
self.old_website_item_groups = frappe.db.sql_list("""select item_group
from `tabWebsite Item Group`
where parentfield='website_item_groups' and parenttype='Item' and parent=%s""", self.name)
def on_update(self):
invalidate_cache_for_item(self)
self.validate_name_with_item_group()
self.update_variants()
self.update_item_price()
self.update_template_item()
def validate_description(self):
'''Clean HTML description if set'''
if cint(frappe.db.get_single_value('Stock Settings', 'clean_description_html')):
self.description = clean_html(self.description)
def add_price(self, price_list=None):
'''Add a new price'''
if not price_list:
price_list = (frappe.db.get_single_value('Selling Settings', 'selling_price_list')
or frappe.db.get_value('Price List', _('Standard Selling')))
if price_list:
item_price = frappe.get_doc({
"doctype": "Item Price",
"price_list": price_list,
"item_code": self.name,
"currency": erpnext.get_default_currency(),
"price_list_rate": self.standard_rate
})
item_price.insert()
def set_opening_stock(self):
'''set opening stock'''
if not self.is_stock_item or self.has_serial_no or self.has_batch_no:
return
if not self.valuation_rate and self.standard_rate:
self.valuation_rate = self.standard_rate
if not self.valuation_rate:
frappe.throw(_("Valuation Rate is mandatory if Opening Stock entered"))
from erpnext.stock.doctype.stock_entry.stock_entry_utils import make_stock_entry
# default warehouse, or Stores
for default in self.item_defaults:
default_warehouse = (default.default_warehouse
or frappe.db.get_single_value('Stock Settings', 'default_warehouse')
or frappe.db.get_value('Warehouse', {'warehouse_name': _('Stores')}))
if default_warehouse:
stock_entry = make_stock_entry(item_code=self.name, target=default_warehouse, qty=self.opening_stock,
rate=self.valuation_rate, company=default.company)
stock_entry.add_comment("Comment", _("Opening Stock"))
def make_route(self):
if not self.route:
return cstr(frappe.db.get_value('Item Group', self.item_group,
'route')) + '/' + self.scrub((self.item_name if self.item_name else self.item_code) + '-' + random_string(5))
	def validate_website_image(self):
		"""Validate that the website image is a public file"""
		if frappe.flags.in_import:
			return
auto_set_website_image = False
if not self.website_image and self.image:
auto_set_website_image = True
self.website_image = self.image
if not self.website_image:
return
# find if website image url exists as public
file_doc = frappe.get_all("File", filters={
"file_url": self.website_image
}, fields=["name", "is_private"], order_by="is_private asc", limit_page_length=1)
if file_doc:
file_doc = file_doc[0]
if not file_doc:
if not auto_set_website_image:
frappe.msgprint(_("Website Image {0} attached to Item {1} cannot be found").format(self.website_image, self.name))
self.website_image = None
elif file_doc.is_private:
if not auto_set_website_image:
frappe.msgprint(_("Website Image should be a public file or website URL"))
self.website_image = None
	def make_thumbnail(self):
		"""Make a thumbnail of `website_image`"""
		if frappe.flags.in_import:
			return
import requests.exceptions
if not self.is_new() and self.website_image != frappe.db.get_value(self.doctype, self.name, "website_image"):
self.thumbnail = None
if self.website_image and not self.thumbnail:
file_doc = None
try:
file_doc = frappe.get_doc("File", {
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
})
except frappe.DoesNotExistError:
pass
# cleanup
frappe.local.message_log.pop()
except requests.exceptions.HTTPError:
frappe.msgprint(_("Warning: Invalid attachment {0}").format(self.website_image))
self.website_image = None
except requests.exceptions.SSLError:
frappe.msgprint(
_("Warning: Invalid SSL certificate on attachment {0}").format(self.website_image))
self.website_image = None
# for CSV import
if self.website_image and not file_doc:
try:
file_doc = frappe.get_doc({
"doctype": "File",
"file_url": self.website_image,
"attached_to_doctype": "Item",
"attached_to_name": self.name
}).insert()
except IOError:
self.website_image = None
if file_doc:
if not file_doc.thumbnail_url:
file_doc.make_thumbnail()
self.thumbnail = file_doc.thumbnail_url
def validate_fixed_asset(self):
if self.is_fixed_asset:
if self.is_stock_item:
frappe.throw(_("Fixed Asset Item must be a non-stock item."))
if not self.asset_category:
frappe.throw(_("Asset Category is mandatory for Fixed Asset item"))
if self.stock_ledger_created():
frappe.throw(_("Cannot be a fixed asset item as Stock Ledger is created."))
if not self.is_fixed_asset:
asset = frappe.db.get_all("Asset", filters={"item_code": self.name, "docstatus": 1}, limit=1)
if asset:
frappe.throw(_('"Is Fixed Asset" cannot be unchecked, as Asset record exists against the item'))
def validate_retain_sample(self):
if self.retain_sample and not frappe.db.get_single_value('Stock Settings', 'sample_retention_warehouse'):
frappe.throw(_("Please select Sample Retention Warehouse in Stock Settings first"))
if self.retain_sample and not self.has_batch_no:
frappe.throw(_(" {0} Retain Sample is based on batch, please check Has Batch No to retain sample of item").format(
self.item_code))
def get_context(self, context):
context.show_search = True
context.search_link = '/product_search'
context.parents = get_parent_item_groups(self.item_group)
self.set_variant_context(context)
self.set_attribute_context(context)
self.set_disabled_attributes(context)
return context
def set_variant_context(self, context):
if self.has_variants:
context.no_cache = True
# load variants
# also used in set_attribute_context
context.variants = frappe.get_all("Item",
filters={"variant_of": self.name, "show_variant_in_website": 1},
order_by="name asc")
variant = frappe.form_dict.variant
if not variant and context.variants:
# the case when the item is opened for the first time from its list
variant = context.variants[0]
if variant:
context.variant = frappe.get_doc("Item", variant)
for fieldname in ("website_image", "web_long_description", "description",
"website_specifications"):
if context.variant.get(fieldname):
value = context.variant.get(fieldname)
if isinstance(value, list):
value = [d.as_dict() for d in value]
context[fieldname] = value
if self.slideshow:
if context.variant and context.variant.slideshow:
context.update(get_slideshow(context.variant))
else:
context.update(get_slideshow(self))
def set_attribute_context(self, context):
if self.has_variants:
attribute_values_available = {}
context.attribute_values = {}
context.selected_attributes = {}
# load attributes
for v in context.variants:
v.attributes = frappe.get_all("Item Variant Attribute",
fields=["attribute", "attribute_value"],
filters={"parent": v.name})
for attr in v.attributes:
values = attribute_values_available.setdefault(attr.attribute, [])
if attr.attribute_value not in values:
values.append(attr.attribute_value)
if v.name == context.variant.name:
context.selected_attributes[attr.attribute] = attr.attribute_value
# filter attributes, order based on attribute table
for attr in self.attributes:
values = context.attribute_values.setdefault(attr.attribute, [])
if cint(frappe.db.get_value("Item Attribute", attr.attribute, "numeric_values")):
for val in sorted(attribute_values_available.get(attr.attribute, []), key=flt):
values.append(val)
else:
# get list of values defined (for sequence)
for attr_value in frappe.db.get_all("Item Attribute Value",
fields=["attribute_value"],
filters={"parent": attr.attribute}, order_by="idx asc"):
if attr_value.attribute_value in attribute_values_available.get(attr.attribute, []):
values.append(attr_value.attribute_value)
context.variant_info = json.dumps(context.variants)
def set_disabled_attributes(self, context):
"""Disable selection options of attribute combinations that do not result in a variant"""
if not self.attributes or not self.has_variants:
return
context.disabled_attributes = {}
attributes = [attr.attribute for attr in self.attributes]
def find_variant(combination):
for variant in context.variants:
if len(variant.attributes) < len(attributes):
continue
if "combination" not in variant:
ref_combination = []
for attr in variant.attributes:
idx = attributes.index(attr.attribute)
ref_combination.insert(idx, attr.attribute_value)
variant["combination"] = ref_combination
if not (set(combination) - set(variant["combination"])):
# check if the combination is a subset of a variant combination
# eg. [Blue, 0.5] is a possible combination if exists [Blue, Large, 0.5]
return True
for i, attr in enumerate(self.attributes):
if i == 0:
continue
combination_source = []
# loop through previous attributes
for prev_attr in self.attributes[:i]:
combination_source.append([context.selected_attributes.get(prev_attr.attribute)])
combination_source.append(context.attribute_values[attr.attribute])
for combination in itertools.product(*combination_source):
if not find_variant(combination):
context.disabled_attributes.setdefault(attr.attribute, []).append(combination[-1])
def add_default_uom_in_conversion_factor_table(self):
uom_conv_list = [d.uom for d in self.get("uoms")]
if self.stock_uom not in uom_conv_list:
ch = self.append('uoms', {})
ch.uom = self.stock_uom
ch.conversion_factor = 1
to_remove = []
for d in self.get("uoms"):
if d.conversion_factor == 1 and d.uom != self.stock_uom:
to_remove.append(d)
[self.remove(d) for d in to_remove]
def update_template_tables(self):
template = frappe.get_doc("Item", self.variant_of)
# add item taxes from template
for d in template.get("taxes"):
self.append("taxes", {"tax_type": d.tax_type, "tax_rate": d.tax_rate})
# copy re-order table if empty
if not self.get("reorder_levels"):
for d in template.get("reorder_levels"):
n = {}
for k in ("warehouse", "warehouse_reorder_level",
"warehouse_reorder_qty", "material_request_type"):
n[k] = d.get(k)
self.append("reorder_levels", n)
def validate_conversion_factor(self):
check_list = []
for d in self.get('uoms'):
if cstr(d.uom) in check_list:
frappe.throw(
_("Unit of Measure {0} has been entered more than once in Conversion Factor Table").format(d.uom))
else:
check_list.append(cstr(d.uom))
if d.uom and cstr(d.uom) == cstr(self.stock_uom) and flt(d.conversion_factor) != 1:
frappe.throw(
_("Conversion factor for default Unit of Measure must be 1 in row {0}").format(d.idx))
def validate_item_type(self):
if self.has_serial_no == 1 and self.is_stock_item == 0 and not self.is_fixed_asset:
msgprint(_("'Has Serial No' can not be 'Yes' for non-stock item"), raise_exception=1)
if self.has_serial_no == 0 and self.serial_no_series:
self.serial_no_series = None
def check_for_active_boms(self):
if self.default_bom:
bom_item = frappe.db.get_value("BOM", self.default_bom, "item")
if bom_item not in (self.name, self.variant_of):
frappe.throw(
_("Default BOM ({0}) must be active for this item or its template").format(bom_item))
def fill_customer_code(self):
""" Append all the customer codes and insert into "customer_code" field of item table """
cust_code = []
for d in self.get('customer_items'):
cust_code.append(d.ref_code)
self.customer_code = ','.join(cust_code)
def check_item_tax(self):
"""Check whether Tax Rate is not entered twice for same Tax Type"""
check_list = []
for d in self.get('taxes'):
if d.tax_type:
account_type = frappe.db.get_value("Account", d.tax_type, "account_type")
if account_type not in ['Tax', 'Chargeable', 'Income Account', 'Expense Account']:
frappe.throw(
_("Item Tax Row {0} must have account of type Tax or Income or Expense or Chargeable").format(d.idx))
else:
if d.tax_type in check_list:
frappe.throw(_("{0} entered twice in Item Tax").format(d.tax_type))
else:
check_list.append(d.tax_type)
def validate_barcode(self):
from stdnum import ean
if len(self.barcodes) > 0:
for item_barcode in self.barcodes:
options = frappe.get_meta("Item Barcode").get_options("barcode_type").split('\n')
if item_barcode.barcode:
duplicate = frappe.db.sql(
"""select parent from `tabItem Barcode` where barcode = %s and parent != %s""", (item_barcode.barcode, self.name))
if duplicate:
frappe.throw(_("Barcode {0} already used in Item {1}").format(
item_barcode.barcode, duplicate[0][0]), frappe.DuplicateEntryError)
item_barcode.barcode_type = "" if item_barcode.barcode_type not in options else item_barcode.barcode_type
if item_barcode.barcode_type and item_barcode.barcode_type.upper() in ('EAN', 'UPC-A', 'EAN-13', 'EAN-8'):
if not ean.is_valid(item_barcode.barcode):
frappe.throw(_("Barcode {0} is not a valid {1} code").format(
item_barcode.barcode, item_barcode.barcode_type), InvalidBarcode)
def validate_warehouse_for_reorder(self):
'''Validate Reorder level table for duplicate and conditional mandatory'''
warehouse = []
for d in self.get("reorder_levels"):
if not d.warehouse_group:
d.warehouse_group = d.warehouse
if d.get("warehouse") and d.get("warehouse") not in warehouse:
warehouse += [d.get("warehouse")]
else:
frappe.throw(_("Row {0}: An Reorder entry already exists for this warehouse {1}")
.format(d.idx, d.warehouse), DuplicateReorderRows)
if d.warehouse_reorder_level and not d.warehouse_reorder_qty:
frappe.throw(_("Row #{0}: Please set reorder quantity").format(d.idx))
def stock_ledger_created(self):
if not hasattr(self, '_stock_ledger_created'):
self._stock_ledger_created = len(frappe.db.sql("""select name from `tabStock Ledger Entry`
where item_code = %s limit 1""", self.name))
return self._stock_ledger_created
def validate_name_with_item_group(self):
# causes problem with tree build
if frappe.db.exists("Item Group", self.name):
frappe.throw(
_("An Item Group exists with same name, please change the item name or rename the item group"))
def update_item_price(self):
frappe.db.sql("""update `tabItem Price` set item_name=%s,
item_description=%s, brand=%s where item_code=%s""",
(self.item_name, self.description, self.brand, self.name))
def on_trash(self):
super(Item, self).on_trash()
frappe.db.sql("""delete from tabBin where item_code=%s""", self.name)
frappe.db.sql("delete from `tabItem Price` where item_code=%s", self.name)
for variant_of in frappe.get_all("Item", filters={"variant_of": self.name}):
frappe.delete_doc("Item", variant_of.name)
def before_rename(self, old_name, new_name, merge=False):
if self.item_name == old_name:
frappe.db.set_value("Item", old_name, "item_name", new_name)
if merge:
# Validate properties before merging
if not frappe.db.exists("Item", new_name):
frappe.throw(_("Item {0} does not exist").format(new_name))
field_list = ["stock_uom", "is_stock_item", "has_serial_no", "has_batch_no"]
new_properties = [cstr(d) for d in frappe.db.get_value("Item", new_name, field_list)]
if new_properties != [cstr(self.get(fld)) for fld in field_list]:
frappe.throw(_("To merge, following properties must be same for both items")
+ ": \n" + ", ".join([self.meta.get_label(fld) for fld in field_list]))
def after_rename(self, old_name, new_name, merge):
if self.route:
invalidate_cache_for_item(self)
clear_cache(self.route)
frappe.db.set_value("Item", new_name, "item_code", new_name)
if merge:
self.set_last_purchase_rate(new_name)
self.recalculate_bin_qty(new_name)
for dt in ("Sales Taxes and Charges", "Purchase Taxes and Charges"):
for d in frappe.db.sql("""select name, item_wise_tax_detail from `tab{0}`
where ifnull(item_wise_tax_detail, '') != ''""".format(dt), as_dict=1):
item_wise_tax_detail = json.loads(d.item_wise_tax_detail)
if isinstance(item_wise_tax_detail, dict) and old_name in item_wise_tax_detail:
item_wise_tax_detail[new_name] = item_wise_tax_detail[old_name]
item_wise_tax_detail.pop(old_name)
frappe.db.set_value(dt, d.name, "item_wise_tax_detail",
json.dumps(item_wise_tax_detail), update_modified=False)
def set_last_purchase_rate(self, new_name):
last_purchase_rate = get_last_purchase_details(new_name).get("base_rate", 0)
frappe.db.set_value("Item", new_name, "last_purchase_rate", last_purchase_rate)
def recalculate_bin_qty(self, new_name):
from erpnext.stock.stock_balance import repost_stock
frappe.db.auto_commit_on_many_writes = 1
existing_allow_negative_stock = frappe.db.get_value("Stock Settings", None, "allow_negative_stock")
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", 1)
repost_stock_for_warehouses = frappe.db.sql_list("""select distinct warehouse
from tabBin where item_code=%s""", new_name)
# Delete all existing bins to avoid duplicate bins for the same item and warehouse
frappe.db.sql("delete from `tabBin` where item_code=%s", new_name)
for warehouse in repost_stock_for_warehouses:
repost_stock(new_name, warehouse)
frappe.db.set_value("Stock Settings", None, "allow_negative_stock", existing_allow_negative_stock)
frappe.db.auto_commit_on_many_writes = 0
def copy_specification_from_item_group(self):
self.set("website_specifications", [])
if self.item_group:
for label, desc in frappe.db.get_values("Item Website Specification",
{"parent": self.item_group}, ["label", "description"]):
row = self.append("website_specifications")
row.label = label
row.description = desc
def update_bom_item_desc(self):
if self.is_new():
return
if self.db_get('description') != self.description:
frappe.db.sql("""
update `tabBOM`
set description = %s
where item = %s and docstatus < 2
""", (self.description, self.name))
frappe.db.sql("""
update `tabBOM Item`
set description = %s
where item_code = %s and docstatus < 2
""", (self.description, self.name))
frappe.db.sql("""
update `tabBOM Explosion Item`
set description = %s
where item_code = %s and docstatus < 2
""", (self.description, self.name))
def update_template_item(self):
"""Set Show in Website for Template Item if True for its Variant"""
if self.variant_of:
if self.show_in_website:
self.show_variant_in_website = 1
self.show_in_website = 0
if self.show_variant_in_website:
# show template
template_item = frappe.get_doc("Item", self.variant_of)
if not template_item.show_in_website:
template_item.show_in_website = 1
template_item.flags.dont_update_variants = True
template_item.flags.ignore_permissions = True
template_item.save()<|fim▁hole|> companies = list(set([row.company for row in self.item_defaults]))
if len(companies) != len(self.item_defaults):
frappe.throw(_("Cannot set multiple Item Defaults for a company."))
def update_defaults_from_item_group(self):
"""Get defaults from Item Group"""
if self.item_group and not self.item_defaults:
item_defaults = frappe.db.get_values("Item Default", {"parent": self.item_group},
['company', 'default_warehouse','default_price_list','buying_cost_center','default_supplier',
'expense_account','selling_cost_center','income_account'], as_dict = 1)
if item_defaults:
for item in item_defaults:
self.append('item_defaults', {
'company': item.company,
'default_warehouse': item.default_warehouse,
'default_price_list': item.default_price_list,
'buying_cost_center': item.buying_cost_center,
'default_supplier': item.default_supplier,
'expense_account': item.expense_account,
'selling_cost_center': item.selling_cost_center,
'income_account': item.income_account
})
else:
warehouse = ''
defaults = frappe.defaults.get_defaults() or {}
# To check default warehouse is belong to the default company
if defaults.get("default_warehouse") and frappe.db.exists("Warehouse",
{'name': defaults.default_warehouse, 'company': defaults.company}):
warehouse = defaults.default_warehouse
self.append("item_defaults", {
"company": defaults.get("company"),
"default_warehouse": warehouse
})
def update_variants(self):
if self.flags.dont_update_variants or \
frappe.db.get_single_value('Item Variant Settings', 'do_not_update_variants'):
return
if self.has_variants:
variants = frappe.db.get_all("Item", fields=["item_code"], filters={"variant_of": self.name})
if variants:
if len(variants) <= 30:
update_variants(variants, self, publish_progress=False)
frappe.msgprint(_("Item Variants updated"))
else:
frappe.enqueue("erpnext.stock.doctype.item.item.update_variants",
variants=variants, template=self, now=frappe.flags.in_test, timeout=600)
def validate_has_variants(self):
if not self.has_variants and frappe.db.get_value("Item", self.name, "has_variants"):
if frappe.db.exists("Item", {"variant_of": self.name}):
frappe.throw(_("Item has variants."))
def validate_stock_exists_for_template_item(self):
if self.stock_ledger_created() and self._doc_before_save:
if (cint(self._doc_before_save.has_variants) != cint(self.has_variants)
or self._doc_before_save.variant_of != self.variant_of):
frappe.throw(_("Cannot change Variant properties after stock transaction. You will have to make a new Item to do this.").format(self.name),
StockExistsForTemplate)
if self.has_variants or self.variant_of:
if not self.is_child_table_same('attributes'):
frappe.throw(
_('Cannot change Attributes after stock transaction. Make a new Item and transfer stock to the new Item'))
def validate_variant_based_on_change(self):
if not self.is_new() and (self.variant_of or (self.has_variants and frappe.get_all("Item", {"variant_of": self.name}))):
if self.variant_based_on != frappe.db.get_value("Item", self.name, "variant_based_on"):
frappe.throw(_("Variant Based On cannot be changed"))
def validate_uom(self):
if not self.get("__islocal"):
check_stock_uom_with_bin(self.name, self.stock_uom)
if self.has_variants:
for d in frappe.db.get_all("Item", filters={"variant_of": self.name}):
check_stock_uom_with_bin(d.name, self.stock_uom)
if self.variant_of:
template_uom = frappe.db.get_value("Item", self.variant_of, "stock_uom")
if template_uom != self.stock_uom:
frappe.throw(_("Default Unit of Measure for Variant '{0}' must be same as in Template '{1}'")
.format(self.stock_uom, template_uom))
def validate_uom_conversion_factor(self):
if self.uoms:
for d in self.uoms:
value = get_uom_conv_factor(d.uom, self.stock_uom)
if value:
d.conversion_factor = value
def validate_attributes(self):
if not (self.has_variants or self.variant_of):
return
if not self.variant_based_on:
self.variant_based_on = 'Item Attribute'
if self.variant_based_on == 'Item Attribute':
attributes = []
if not self.attributes:
frappe.throw(_("Attribute table is mandatory"))
for d in self.attributes:
if d.attribute in attributes:
frappe.throw(
_("Attribute {0} selected multiple times in Attributes Table".format(d.attribute)))
else:
attributes.append(d.attribute)
def validate_variant_attributes(self):
if self.is_new() and self.variant_of and self.variant_based_on == 'Item Attribute':
args = {}
for d in self.attributes:
if cstr(d.attribute_value).strip() == '':
frappe.throw(_("Please specify Attribute Value for attribute {0}").format(d.attribute))
args[d.attribute] = d.attribute_value
variant = get_variant(self.variant_of, args, self.name)
if variant:
frappe.throw(_("Item variant {0} exists with same attributes")
.format(variant), ItemVariantExistsError)
validate_item_variant_attributes(self, args)
def validate_stock_for_has_batch_and_has_serial(self):
if self.stock_ledger_created():
for value in ["has_batch_no", "has_serial_no"]:
if frappe.db.get_value("Item", self.name, value) != self.get_value(value):
frappe.throw(_("Cannot change {0} as Stock Transaction for Item {1} exist.".format(value, self.name)))
def get_timeline_data(doctype, name):
'''returns timeline data based on stock ledger entry'''
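	# Example output shape (illustrative): {1546300800: 3, 1546387200: 1}, i.e. a count
	# of stock ledger entries per posting date in the last year, keyed by unix timestamp.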
out = {}
items = dict(frappe.db.sql('''select posting_date, count(*)
from `tabStock Ledger Entry` where item_code=%s
and posting_date > date_sub(curdate(), interval 1 year)
group by posting_date''', name))
for date, count in iteritems(items):
timestamp = get_timestamp(date)
out.update({timestamp: count})
return out
def validate_end_of_life(item_code, end_of_life=None, disabled=None, verbose=1):
if (not end_of_life) or (disabled is None):
end_of_life, disabled = frappe.db.get_value("Item", item_code, ["end_of_life", "disabled"])
if end_of_life and end_of_life != "0000-00-00" and getdate(end_of_life) <= now_datetime().date():
msg = _("Item {0} has reached its end of life on {1}").format(item_code, formatdate(end_of_life))
_msgprint(msg, verbose)
if disabled:
_msgprint(_("Item {0} is disabled").format(item_code), verbose)
def validate_is_stock_item(item_code, is_stock_item=None, verbose=1):
if not is_stock_item:
is_stock_item = frappe.db.get_value("Item", item_code, "is_stock_item")
if is_stock_item != 1:
msg = _("Item {0} is not a stock Item").format(item_code)
_msgprint(msg, verbose)
def validate_cancelled_item(item_code, docstatus=None, verbose=1):
if docstatus is None:
docstatus = frappe.db.get_value("Item", item_code, "docstatus")
if docstatus == 2:
msg = _("Item {0} is cancelled").format(item_code)
_msgprint(msg, verbose)
def _msgprint(msg, verbose):
if verbose:
msgprint(msg, raise_exception=True)
else:
raise frappe.ValidationError(msg)
def get_last_purchase_details(item_code, doc_name=None, conversion_rate=1.0):
"""returns last purchase details in stock uom"""
# get last purchase order item details
last_purchase_order = frappe.db.sql("""\
select po.name, po.transaction_date, po.conversion_rate,
po_item.conversion_factor, po_item.base_price_list_rate,
po_item.discount_percentage, po_item.base_rate
from `tabPurchase Order` po, `tabPurchase Order Item` po_item
where po.docstatus = 1 and po_item.item_code = %s and po.name != %s and
po.name = po_item.parent
order by po.transaction_date desc, po.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
# get last purchase receipt item details
last_purchase_receipt = frappe.db.sql("""\
select pr.name, pr.posting_date, pr.posting_time, pr.conversion_rate,
pr_item.conversion_factor, pr_item.base_price_list_rate, pr_item.discount_percentage,
pr_item.base_rate
from `tabPurchase Receipt` pr, `tabPurchase Receipt Item` pr_item
where pr.docstatus = 1 and pr_item.item_code = %s and pr.name != %s and
pr.name = pr_item.parent
order by pr.posting_date desc, pr.posting_time desc, pr.name desc
limit 1""", (item_code, cstr(doc_name)), as_dict=1)
purchase_order_date = getdate(last_purchase_order and last_purchase_order[0].transaction_date
or "1900-01-01")
purchase_receipt_date = getdate(last_purchase_receipt and
last_purchase_receipt[0].posting_date or "1900-01-01")
if (purchase_order_date > purchase_receipt_date) or \
(last_purchase_order and not last_purchase_receipt):
# use purchase order
last_purchase = last_purchase_order[0]
purchase_date = purchase_order_date
elif (purchase_receipt_date > purchase_order_date) or \
(last_purchase_receipt and not last_purchase_order):
# use purchase receipt
last_purchase = last_purchase_receipt[0]
purchase_date = purchase_receipt_date
else:
return frappe._dict()
conversion_factor = flt(last_purchase.conversion_factor)
out = frappe._dict({
"base_price_list_rate": flt(last_purchase.base_price_list_rate) / conversion_factor,
"base_rate": flt(last_purchase.base_rate) / conversion_factor,
"discount_percentage": flt(last_purchase.discount_percentage),
"purchase_date": purchase_date
})
conversion_rate = flt(conversion_rate) or 1.0
out.update({
"price_list_rate": out.base_price_list_rate / conversion_rate,
"rate": out.base_rate / conversion_rate,
"base_rate": out.base_rate
})
return out
def invalidate_cache_for_item(doc):
invalidate_cache_for(doc, doc.item_group)
website_item_groups = list(set((doc.get("old_website_item_groups") or [])
+ [d.item_group for d in doc.get({"doctype": "Website Item Group"}) if d.item_group]))
for item_group in website_item_groups:
invalidate_cache_for(doc, item_group)
if doc.get("old_item_group") and doc.get("old_item_group") != doc.item_group:
invalidate_cache_for(doc, doc.old_item_group)
def check_stock_uom_with_bin(item, stock_uom):
if stock_uom == frappe.db.get_value("Item", item, "stock_uom"):
return
matched = True
ref_uom = frappe.db.get_value("Stock Ledger Entry",
{"item_code": item}, "stock_uom")
if ref_uom:
if cstr(ref_uom) != cstr(stock_uom):
matched = False
else:
bin_list = frappe.db.sql("select * from tabBin where item_code=%s", item, as_dict=1)
for bin in bin_list:
if (bin.reserved_qty > 0 or bin.ordered_qty > 0 or bin.indented_qty > 0
or bin.planned_qty > 0) and cstr(bin.stock_uom) != cstr(stock_uom):
matched = False
break
if matched and bin_list:
frappe.db.sql("""update tabBin set stock_uom=%s where item_code=%s""", (stock_uom, item))
if not matched:
frappe.throw(
_("Default Unit of Measure for Item {0} cannot be changed directly because you have already made some transaction(s) with another UOM. You will need to create a new Item to use a different Default UOM.").format(item))
def get_item_defaults(item_code, company):
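	# Returns the Item's fields with the matching company's item_defaults row merged on
	# top, e.g. out["default_warehouse"] comes from the row whose company matches.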
item = frappe.get_cached_doc('Item', item_code)
out = item.as_dict()
for d in item.item_defaults:
if d.company == company:
row = copy.deepcopy(d.as_dict())
row.pop("name")
out.update(row)
return out
def set_item_default(item_code, company, fieldname, value):
item = frappe.get_cached_doc('Item', item_code)
for d in item.item_defaults:
if d.company == company:
if not d.get(fieldname):
frappe.db.set_value(d.doctype, d.name, fieldname, value)
return
# no row found, add a new row for the company
d = item.append('item_defaults', {fieldname: value, "company": company})
d.db_insert()
item.clear_cache()
@frappe.whitelist()
def get_uom_conv_factor(uom, stock_uom):
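	# A direct row (from_uom == uom, to_uom == stock_uom, value v) yields v and the
	# reverse row yields 1/v; failing that, two rows sharing a common base from_uom
	# are chained as stock_value / uom_value. Returns "" when no path is found.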
uoms = [uom, stock_uom]
value = ""
uom_details = frappe.db.sql("""select to_uom, from_uom, value from `tabUOM Conversion Factor`\
where to_uom in ({0})
""".format(', '.join(['"' + frappe.db.escape(i, percent=False) + '"' for i in uoms])), as_dict=True)
for d in uom_details:
if d.from_uom == stock_uom and d.to_uom == uom:
value = 1/flt(d.value)
elif d.from_uom == uom and d.to_uom == stock_uom:
value = d.value
if not value:
uom_stock = frappe.db.get_value("UOM Conversion Factor", {"to_uom": stock_uom}, ["from_uom", "value"], as_dict=1)
uom_row = frappe.db.get_value("UOM Conversion Factor", {"to_uom": uom}, ["from_uom", "value"], as_dict=1)
if uom_stock and uom_row:
if uom_stock.from_uom == uom_row.from_uom:
value = flt(uom_stock.value) * 1/flt(uom_row.value)
return value
@frappe.whitelist()
def get_item_attribute(parent, attribute_value=''):
if not frappe.has_permission("Item"):
frappe.msgprint(_("No Permission"), raise_exception=1)
return frappe.get_all("Item Attribute Value", fields = ["attribute_value"],
filters = {'parent': parent, 'attribute_value': ("like", "%%%s%%" % attribute_value)})
def update_variants(variants, template, publish_progress=True):
count=0
for d in variants:
variant = frappe.get_doc("Item", d)
copy_attributes_to_variant(template, variant)
variant.save()
count+=1
if publish_progress:
frappe.publish_progress(count*100/len(variants), title = _("Updating Variants..."))<|fim▁end|> |
def validate_item_defaults(self): |
<|file_name|>DCList.hpp<|end_file_name|><|fim▁begin|>#ifndef DCLIST_HPP
#define DCLIST_HPP
#include "celltype.hpp"
#include <string>
#include <sstream>
#include <exception>
class DCList
{
private:
celltype* head, *last;
bool swapped;
int makeList(int*);
public:
DCList();
~DCList();
void insert(int);
int extract(void);
void swap(void);
void makenull(void);
bool empty(void);
bool isSwapped(void);
std::string list(bool);
void cut(int);<|fim▁hole|> celltype* getLast(void);
celltype* locate(int);
};
#endif // DCLIST_HPP<|fim▁end|> | celltype* getHead(void); |
<|file_name|>score.py<|end_file_name|><|fim▁begin|>from typing import Union
from brown import constants
from brown.core import brown
from brown.core.music_font import MusicFont
from brown.core.object_group import ObjectGroup
from brown.core.path import Path<|fim▁hole|>from brown.core.staff import Staff
from examples.feldman_projections_2.glyph_name import GlyphName
from examples.feldman_projections_2.grid_unit import GridUnit
from examples.feldman_projections_2.instrument_data import InstrumentData
from examples.feldman_projections_2.measure import Measure
from examples.feldman_projections_2.music_text_event import MusicTextEvent
from examples.feldman_projections_2.text_event import TextEvent
class Score(ObjectGroup):
_TEXT_FONT_SIZE = GridUnit(0.6).base_value
_MUSIC_FONT_SIZE = Staff._make_unit_class(GridUnit(0.5))
_bar_line_pen = Pen(thickness=GridUnit(0.05), pattern=PenPattern.DOT)
_instrument_divider_pen = Pen(thickness=GridUnit(0.05))
def __init__(self, pos, instruments, parent):
super().__init__(pos, parent)
self.events = []
self.text_font = brown.default_font.modified(
size=Score._TEXT_FONT_SIZE, weight=60
)
self.music_font = MusicFont(
constants.DEFAULT_MUSIC_FONT_NAME, Score._MUSIC_FONT_SIZE
)
self.instruments = instruments
for i, instrument in enumerate(instruments):
for event_data in instrument.event_data:
self.events.append(self._create_event(i, event_data))
self.draw_instrument_dividers()
self.draw_bar_lines()
def _create_event(self, instrument_index, event_data):
if isinstance(event_data.text, GlyphName):
return self._create_music_text_event(instrument_index, event_data)
return self._create_text_event(instrument_index, event_data)
def _create_text_event(self, instrument_index, event_data):
return TextEvent(
(
event_data.pos_x,
(Score._instrument_pos_y(instrument_index) + event_data.register.value),
),
self,
event_data.length,
event_data.text,
self.text_font,
)
def _create_music_text_event(self, instrument_index, event_data):
return MusicTextEvent(
(
event_data.pos_x,
(Score._instrument_pos_y(instrument_index) + event_data.register.value),
),
self,
event_data.length,
event_data.text,
self.music_font,
)
@property
def measure_count(self):
return (
max(
max(int(Measure(e.pos_x).display_value) for e in i.event_data)
for i in self.instruments
)
+ 1
)
@staticmethod
def _instrument_pos_y(instrument_index):
return GridUnit(3 * instrument_index)
@staticmethod
def _divider_pos_y(divider_index):
return GridUnit(3 * divider_index)
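    # Layout convention (derived from the two helpers above): each instrument row is
    # 3 GridUnits tall, so divider i coincides with the top edge of instrument i.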
@staticmethod
def _divider_visible(
instrument_above: Union[InstrumentData, None],
instrument_below: Union[InstrumentData, None],
measure_num: int,
) -> bool:
return (
instrument_above is not None
and instrument_above.measure_has_events(measure_num)
) or (
instrument_below is not None
and instrument_below.measure_has_events(measure_num)
)
def _bar_line_extends_below(self, measure_num: int, divider_num: int) -> bool:
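        # A bar line segment spans instrument `divider_num` when that instrument has
        # events in the measure on either side of this bar line.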
if divider_num >= len(self.instruments):
return False
instrument = self.instruments[divider_num]
return instrument.measure_has_events(
measure_num - 1
) or instrument.measure_has_events(measure_num)
def draw_instrument_dividers(self):
for divider in range(len(self.instruments) + 1):
current_path = Path(
(Measure(0), Score._divider_pos_y(divider)),
pen=Score._instrument_divider_pen,
parent=self,
)
instrument_above = self.instruments[divider - 1] if divider > 0 else None
instrument_below = (
self.instruments[divider] if divider < len(self.instruments) else None
)
drawing = False
for measure_num in range(self.measure_count + 1):
if Score._divider_visible(
instrument_above, instrument_below, measure_num
):
if not drawing:
current_path.move_to(Measure(measure_num), GridUnit(0))
drawing = True
else:
if drawing:
current_path.line_to(Measure(measure_num), GridUnit(0))
drawing = False
def draw_bar_lines(self):
for measure_num in range(self.measure_count + 1):
current_path = Path(
(Measure(measure_num), GridUnit(0)),
pen=Score._bar_line_pen,
parent=self,
)
drawing = False
for divider_num in range(len(self.instruments) + 1):
if self._bar_line_extends_below(measure_num, divider_num):
if not drawing:
current_path.move_to(
GridUnit(0), Score._instrument_pos_y(divider_num)
)
drawing = True
else:
if drawing:
current_path.line_to(
GridUnit(0), Score._instrument_pos_y(divider_num)
)
drawing = False<|fim▁end|> | from brown.core.pen import Pen
from brown.core.pen_pattern import PenPattern |
<|file_name|>response_message.cc<|end_file_name|><|fim▁begin|>// Copyright (C) 1999,2000 Bruce Guenter <[email protected]>
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#include <config.h>
#include "response.h"
mystring response::codestr() const
{
static const mystring errstr = "ERROR";<|fim▁hole|> static const mystring econnstr = "ECONN";
static const mystring badstr = "BAD";
static const mystring okstr = "OK";
static const mystring unknownstr = "???";
switch(code) {
case err: return errstr;
case econn: return econnstr;
case bad: return badstr;
case ok: return okstr;
default: return unknownstr;
}
}
mystring response::message() const
{
return codestr() + ": " + msg;
}<|fim▁end|> | |
<|file_name|>ili9341.rs<|end_file_name|><|fim▁begin|>// Zinc, the bare metal stack for rust.
// Copyright 2014 Vladimir "farcaller" Pouzanov <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Driver for the ILI9341 LCD.
use core::iter::range;
use super::LCD;
use drivers::chario::CharIO;
use hal::timer::Timer;
use hal::pin::Gpio;
use hal::spi::Spi;
/// ILI9341 driver.
pub struct ILI9341<'a, S:'a, T:'a, P:'a> {
spi: &'a S,
timer: &'a T,
dc: &'a P,
cs: &'a P,
reset: &'a P,
// backlight: gpio::OutGPIO,
}
impl<'a, S: Spi, T: Timer, P: Gpio> ILI9341<'a, S, T, P> {
/// Creates a new ILI9341 driver instance.
pub fn new(spi: &'a S, timer: &'a T, dc: &'a P, cs: &'a P, reset: &'a P)
-> ILI9341<'a, S, T, P> {
let lcd = ILI9341 {
spi: spi,
timer: timer,
dc: dc,
cs: cs,
      reset: reset,
// dc: gpio::OutGPIO::new(0, 24),<|fim▁hole|> // reset: gpio::OutGPIO::new(0, 23),
// backlight: gpio::OutGPIO::new(0, 999),
};
// FIXME(farcaller): SPI uses MSB, SCL rising
lcd.configure();
lcd
}
fn configure(&self) {
self.cs.set_high();
self.dc.set_high();
self.reset.set_low();
self.timer.wait_ms(10);
self.reset.set_high();
self.verify_id(); // this fails :)
self.verify_id(); // TODO(farcaller): verify that this didn't fail or bail out
self.verify_id(); // and this as well
self.set_power_control_a();
self.set_power_control_b();
self.driver_timing_control_a();
self.driver_timing_control_b();
self.power_on_sequence_control();
self.pump_ratio_control();
self.power_control_1();
self.power_control_2();
self.vcom_control_1();
self.everything_else();
}
fn verify_id(&self) -> bool {
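    // Read ID4 (command 0xd3): a genuine ILI9341 reports 0x00, 0x93, 0x41.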
let mut data: [u8; 3] = [0, 0, 0];
let id: [u8; 3] = [0x00, 0x93, 0x41];
for i in range(0, 3) {
data[i] = self.read_register(0xd3, (i+1) as u8);
if data[i] != id[i] {
return false;
}
}
true
}
fn set_power_control_a(&self) {
self.send_cmd(0xcb);
self.write_data(0x39);
self.write_data(0x2c);
self.write_data(0x00);
self.write_data(0x34); // REG_VD = 0b100 = Vcore 1.6V
self.write_data(0x02); // VBC = 0b010 = DDVDH 5.6V
}
fn set_power_control_b(&self) {
self.send_cmd(0xcf);
self.write_data(0x00);
self.write_data(0xc1); // TODO(farcaller): according to the spec this is 0x81
self.write_data(0x30); // ESD protection enabled
}
fn driver_timing_control_a(&self) {
self.send_cmd(0xe8);
self.write_data(0x85); // Non-overlap timing control = 1 unit
self.write_data(0x00); // EQ timing = 1 unit; CR timing = 1 unit
    self.write_data(0x78); // Pre-charge timing = 2 units
}
fn driver_timing_control_b(&self) {
self.send_cmd(0xea);
self.write_data(0x00); // 0 units EQ to GND, DDVDH
self.write_data(0x00);
}
fn power_on_sequence_control(&self) {
self.send_cmd(0xed);
self.write_data(0x64); // CP1,CP2,CP3 soft start keep 1 frame
self.write_data(0x03); // Vcl 1st frame enable; DDVDH 4th frame enable
self.write_data(0x12); // Vgh 2nd frame enable; Vgl 3rd frame enable
self.write_data(0x81); // DDVDH enhance mode enabled
}
fn pump_ratio_control(&self) {
self.send_cmd(0xf7);
self.write_data(0x20); // DDVDH = 2xVCI
}
fn power_control_1(&self) {
self.send_cmd(0xc0);
self.write_data(0x23); // GVDD = 4.6V
}
fn power_control_2(&self) {
self.send_cmd(0xc1);
    self.write_data(0x10); // presumably DDVDH = VCI*2,
                           // VGH = VCI*7,
                           // VGL = -VCI*4
}
fn vcom_control_1(&self) {
self.send_cmd(0xc5);
self.write_data(0x3e); // VCOMH = 4.25V
self.write_data(0x28); // VCOML = -1.5V
}
fn everything_else(&self) {
self.send_cmd(0xC7);
self.write_data(0x86);
self.send_cmd(0x36);
self.write_data(0x48);
self.send_cmd(0x3A);
self.write_data(0x55);
self.send_cmd(0xB1);
self.write_data(0x00);
self.write_data(0x18);
self.send_cmd(0xB6);
self.write_data(0x08);
self.write_data(0x82);
self.write_data(0x27);
self.send_cmd(0xF2);
self.write_data(0x00);
self.send_cmd(0x26);
self.write_data(0x01);
self.send_cmd(0xE0);
self.write_data(0x0F);
self.write_data(0x31);
self.write_data(0x2B);
self.write_data(0x0C);
self.write_data(0x0E);
self.write_data(0x08);
self.write_data(0x4E);
self.write_data(0xF1);
self.write_data(0x37);
self.write_data(0x07);
self.write_data(0x10);
self.write_data(0x03);
self.write_data(0x0E);
self.write_data(0x09);
self.write_data(0x00);
self.send_cmd(0xE1);
self.write_data(0x00);
self.write_data(0x0E);
self.write_data(0x14);
self.write_data(0x03);
self.write_data(0x11);
self.write_data(0x07);
self.write_data(0x31);
self.write_data(0xC1);
self.write_data(0x48);
self.write_data(0x08);
self.write_data(0x0F);
self.write_data(0x0C);
self.write_data(0x31);
self.write_data(0x36);
self.write_data(0x0F);
self.send_cmd(0x11);
self.timer.wait_ms(120);
self.send_cmd(0x29);
self.send_cmd(0x2c);
}
fn read_register(&self, addr: u8, param: u8) -> u8 {
self.send_cmd(0xd9);
self.write_data(0x10 + param);
self.dc.set_low();
self.cs.set_low();
self.spi.transfer(addr);
self.dc.set_high();
let data = self.spi.transfer(0);
self.cs.set_high();
data
}
#[inline(never)]
fn send_cmd(&self, index: u8) {
self.dc.set_low();
self.cs.set_low();
self.spi.transfer(index);
self.cs.set_high();
}
#[inline(never)]
fn write_data(&self, data: u8) {
self.dc.set_high();
self.cs.set_low();
self.spi.transfer(data);
self.cs.set_high();
}
fn send_data(&self, data: u16) {
let data1: u8 = (data >> 8) as u8;
let data2: u8 = (data & 0xff) as u8;
self.dc.set_high();
self.cs.set_low();
self.spi.transfer(data1);
self.spi.transfer(data2);
self.cs.set_high();
}
fn set_col(&self, start: u16, end: u16) {
self.send_cmd(0x2a);
self.send_data(start);
self.send_data(end);
}
fn set_page(&self, start: u16, end: u16) {
self.send_cmd(0x2b);
self.send_data(start);
self.send_data(end);
}
fn do_clear(&self) {
self.set_col(0, 239);
self.set_page(0, 319);
self.send_cmd(0x2c);
self.dc.set_high();
self.cs.set_low();
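    // 240 * 320 pixels at 16 bpp = 153600 bytes; each iteration sends 4 bytes.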
for _ in range(0usize, 38400) {
self.spi.transfer(0);
self.spi.transfer(0);
self.spi.transfer(0);
self.spi.transfer(0);
}
self.cs.set_high();
}
fn do_pixel(&self, x: u32, y: u32, color: u16) {
self.set_col(x as u16, x as u16);
self.set_page(y as u16, y as u16);
self.send_cmd(0x2c);
self.send_data(color);
}
}
impl<'a, S: Spi, T: Timer, P: Gpio> LCD for ILI9341<'a, S, T, P> {
fn clear(&self) {
self.do_clear();
}
fn flush(&self) {}
fn pixel(&self, x: u32, y: u32, color: u16) {
self.do_pixel(x, y, color);
}
}
impl<'a, S: Spi, T: Timer, P: Gpio> CharIO for ILI9341<'a, S, T, P> {
fn putc(&self, _: char) {
// TODO(farcaller): implement
}
}<|fim▁end|> | // cs: gpio::OutGPIO::new(0, 16), |
<|file_name|>ast.rs<|end_file_name|><|fim▁begin|>pub struct ModuleDecl {
pub name: Ident,
pub endname: Ident,
pub components: Vec<ComponentDecl>,
pub entities: Vec<EntityDecl>
}
pub struct EntityDecl {
pub name: Ident,
pub endname: Ident,
pub generics: Option<Vec<GenericDecl>>,
pub ports: Option<Vec<PortDecl>>,
pub wires: Vec<WireDecl>,
pub insts: Vec<EntityInst>
}
pub struct ComponentDecl {
pub name: Ident,
pub endname: Ident,
pub generics: Option<Vec<GenericDecl>>,
pub ports: Option<Vec<PortDecl>>,
pub attributes: Vec<AttributeDef>
}
pub struct GenericDecl {
pub name: Ident,
pub gentype: Ident,
pub defval: Option<ConstExpr>
}
pub struct PortDecl {<|fim▁hole|>}
pub struct AttributeDef {
pub name: Ident,
pub typename: Ident,
pub value: ConstExpr
}
pub struct WireDecl {
pub name: Ident,
pub class: Ident
}
pub enum EntityInst {
Short(EntityInstShort),
Long(EntityInstLong)
}
pub struct EntityInstShort {
pub name: Ident,
pub ports: CommaList<Ident>,
pub entity: Ident,
pub generics: Option<CommaList<ConstExpr>>
}
pub struct EntityInstLong {
pub name: Ident,
pub entity: Ident,
pub generics: Option<Vec<GenericAssign>>,
pub ports: Option<Vec<PortAssign>>
}
pub struct GenericAssign {
pub generic: Ident,
pub value: ConstExpr
}
pub struct PortAssign {
pub port: Ident,
pub dir: Direction,
pub wire: Ident
}
pub struct CommaList<T> {
pub head: Vec<T>,
pub tail: T
}
pub enum ConstExpr {
Ident(Ident),
Number(Number),
String(String)
}
// Terminals
pub enum Direction { None, In, Out }
pub struct Number(pub String);
pub struct Ident(pub String);<|fim▁end|> | pub name: Ident,
pub dir: Direction,
pub class: Ident |
<|file_name|>pin-needed-to-poll-2.rs<|end_file_name|><|fim▁begin|>use std::{
future::Future,
pin::Pin,
marker::Unpin,
task::{Context, Poll},
};
struct Sleep(std::marker::PhantomPinned);
impl Future for Sleep {
type Output = ();
<|fim▁hole|>
impl Drop for Sleep {
fn drop(&mut self) {}
}
fn sleep() -> Sleep {
Sleep(std::marker::PhantomPinned)
}
struct MyFuture {
sleep: Sleep,
}
impl MyFuture {
fn new() -> Self {
Self {
sleep: sleep(),
}
}
}
impl Future for MyFuture {
type Output = ();
fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Pin::new(&mut self.sleep).poll(cx)
//~^ ERROR `PhantomPinned` cannot be unpinned
}
}
fn main() {}<|fim▁end|> | fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
Poll::Ready(())
}
} |
<|file_name|>isomorphic_strings.js<|end_file_name|><|fim▁begin|>/**
* @param {string} s
* @param {string} t
* @return {boolean}
*/
var isIsomorphic = function(s, t) {
s = s.split('');
t = t.split('');
if (new Set(s).size !== new Set(t).size) return false;
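// Pair the characters position-by-position ("e a", "g d", ...): the strings
// are isomorphic exactly when the number of distinct pairs equals the
// number of distinct characters, i.e. the mapping is one-to-one.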
var zip = new Set();
s.forEach(function (item, i) {
zip.add(s[i] + ' ' + t[i])
});<|fim▁hole|> return new Set(zip).size === new Set(s).size;
};
var eq = require('assert').equal;
eq(isIsomorphic('egg', 'add'), true);
eq(isIsomorphic('egg', 'ddd'), false);<|fim▁end|> | |
<|file_name|>htmldlistelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::utils::{DOMString, null_string, ErrorResult};
use dom::htmlelement::HTMLElement;
pub struct HTMLDListElement {
parent: HTMLElement
}
<|fim▁hole|>
pub fn SetCompact(&mut self, _compact: bool, _rv: &mut ErrorResult) {
}
pub fn Type(&self) -> DOMString {
null_string
}
pub fn SetType(&mut self, _type: &DOMString, _rv: &mut ErrorResult) {
}
}<|fim▁end|> | impl HTMLDListElement {
pub fn Compact(&self) -> bool {
false
} |
<|file_name|>php4dvd_negative.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class Untitled(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "http://localhost/"
self.verificationErrors = []
self.accept_next_alert = True
<|fim▁hole|> driver = self.driver
driver.get(self.base_url + "/php4dvd/")
driver.find_element_by_id("username").clear()
driver.find_element_by_id("username").send_keys("admin")
driver.find_element_by_name("password").clear()
driver.find_element_by_name("password").send_keys("admin")
driver.find_element_by_name("submit").click()
driver.find_element_by_css_selector("img[alt=\"Add movie\"]").click()
driver.find_element_by_name("name").clear()
driver.find_element_by_name("name").send_keys() #send_keys("aaaaaaaaaaaaaaaaa")
driver.find_element_by_name("year").clear()
driver.find_element_by_name("year").send_keys("1977")
driver.find_element_by_css_selector("img[alt=\"Save\"]").click()
driver.find_element_by_id("submit").click()
driver.find_element_by_css_selector("img[alt=\"Own\"]").click()
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException as e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException as e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()<|fim▁end|> | def test_untitled(self): |
<|file_name|>home.js<|end_file_name|><|fim▁begin|>/*
* Copyright 2020 Verizon Media
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import React from 'react';
import Header from '../components/header/Header';
import UserDomains from '../components/domain/UserDomains';
import API from '../api.js';
import styled from '@emotion/styled';
import Head from 'next/head';
import Search from '../components/search/Search';
import RequestUtils from '../components/utils/RequestUtils';
import Error from './_error';
import createCache from '@emotion/cache';
import { CacheProvider } from '@emotion/react';
const HomeContainerDiv = styled.div`
flex: 1 1;
`;
const HomeContentDiv = styled.div`
align-items: center;
height: 100%;
justify-content: flex-start;
width: 100%;
display: flex;
flex-direction: column;
`;
const Logo = ({ className }) => (
<img src='/static/athenz-logo.png' className={className} />
);
const LogoStyled = styled(Logo)`
height: 100px;
width: 100px;
`;
const MainLogoDiv = styled.div`
padding-top: 20px;
`;
const DetailsDiv = styled.div`
align-items: flex-start;
line-height: 1.3;
padding: 20px 0;
text-align: center;
width: 650px;
`;
const SearchContainerDiv = styled.div`
padding: 20px 0 0 0;
width: 600px;
`;
const AppContainerDiv = styled.div`
align-items: stretch;
flex-flow: row nowrap;
height: 100%;
display: flex;
justify-content: flex-start;
`;
const MainContentDiv = styled.div`
flex: 1 1 calc(100vh - 60px);
overflow: hidden;
font: 300 14px HelveticaNeue-Reg, Helvetica, Arial, sans-serif;
`;
const StyledAnchor = styled.a`
color: #3570f4;
text-decoration: none;
cursor: pointer;
`;
export default class PageHome extends React.Component {
static async getInitialProps({ req }) {
let api = API(req);
let reload = false;
let error = null;
const domains = await Promise.all([
api.listUserDomains(),
api.getHeaderDetails(),
api.getPendingDomainMembersList(),
]).catch((err) => {
let response = RequestUtils.errorCheckHelper(err);
reload = response.reload;
error = response.error;
return [{}, {}, {}];
});
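// On failure the catch above substitutes empty placeholders, so render()
// can still run and show the <Error> page via this.props.error.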
return {
api,
reload,
error,
domains: domains[0],
headerDetails: domains[1],
pending: domains[2],
nonce: req.headers.rid,<|fim▁hole|>
constructor(props) {
super(props);
this.api = props.api || API();
this.cache = createCache({
key: 'athenz',
nonce: this.props.nonce,
});
}
render() {
if (this.props.reload) {
window.location.reload();
return <div />;
}
if (this.props.error) {
return <Error err={this.props.error} />;
}
return (
<CacheProvider value={this.cache}>
<div data-testid='home'>
<Head>
<title>Athenz</title>
</Head>
<Header
showSearch={false}
headerDetails={this.props.headerDetails}
pending={this.props.pending}
/>
<MainContentDiv>
<AppContainerDiv>
<HomeContainerDiv>
<HomeContentDiv>
<MainLogoDiv>
<LogoStyled />
</MainLogoDiv>
<DetailsDiv>
<span>
Athenz is an open source platform
which provides secure identity in
the form of X.509 certificate to
every workload for service
authentication (mutual TLS
authentication) and provides
fine-grained Role Based Access
Control (RBAC) for authorization.
</span>
<StyledAnchor
rel='noopener'
target='_blank'
href='https://git.ouroath.com/pages/athens/athenz-guide/'
>
Learn more
</StyledAnchor>
</DetailsDiv>
<SearchContainerDiv>
<Search />
</SearchContainerDiv>
</HomeContentDiv>
</HomeContainerDiv>
<UserDomains
domains={this.props.domains}
api={this.api}
/>
</AppContainerDiv>
</MainContentDiv>
</div>
</CacheProvider>
);
}
}<|fim▁end|> | };
} |
<|file_name|>nvidia_gpu.rs<|end_file_name|><|fim▁begin|>use std::time::Duration;
use std::process::Command;
use chan::Sender;
use block::{Block, ConfigBlock};
use config::Config;
use de::deserialize_duration;
use errors::*;
use input::{I3BarEvent, MouseButton};
use scheduler::Task;
use uuid::Uuid;
use widget::{I3BarWidget, State};
use widgets::button::ButtonWidget;
use widgets::text::TextWidget;
pub struct NvidiaGpu {
gpu_widget: ButtonWidget,
id: String,
id_fans: String,
id_memory: String,
update_interval: Duration,
gpu_id: u64,
gpu_name: String,
gpu_name_displayed: bool,
label: String,
show_utilization: Option<TextWidget>,
show_memory: Option<ButtonWidget>,
memory_total: String,
memory_total_displayed: bool,
show_temperature: Option<TextWidget>,
show_fan: Option<ButtonWidget>,
fan_speed: u64,
fan_speed_controlled: bool,
show_clocks: Option<TextWidget>,
}
#[derive(Deserialize, Debug, Default, Clone)]
#[serde(deny_unknown_fields)]
pub struct NvidiaGpuConfig {
/// Update interval in seconds
#[serde(default = "NvidiaGpuConfig::default_interval", deserialize_with = "deserialize_duration")]
pub interval: Duration,
/// Label
#[serde(default = "NvidiaGpuConfig::default_label")]
pub label: String,
/// GPU id in system
#[serde(default = "NvidiaGpuConfig::default_gpu_id")]
pub gpu_id: u64,
/// GPU utilization. In percents.
#[serde(default = "NvidiaGpuConfig::default_show_utilization")]
pub show_utilization: bool,
/// VRAM utilization.
#[serde(default = "NvidiaGpuConfig::default_show_memory")]
pub show_memory: bool,
/// Core GPU temperature. In degrees C.
#[serde(default = "NvidiaGpuConfig::default_show_temperature")]
pub show_temperature: bool,
/// Fan speed. In percents.
#[serde(default = "NvidiaGpuConfig::default_show_fan_speed")]
pub show_fan_speed: bool,
/// GPU clocks. In percents.
#[serde(default = "NvidiaGpuConfig::default_show_clocks")]
pub show_clocks: bool,
}
impl NvidiaGpuConfig {
fn default_interval() -> Duration {
Duration::from_secs(3)
}
fn default_label() -> String {
"".to_string()
}
fn default_gpu_id() -> u64 {
0
}
fn default_show_utilization() -> bool {
true
}
fn default_show_memory() -> bool {
true
}
fn default_show_temperature() -> bool {
true
}
fn default_show_fan_speed() -> bool {
false
}
fn default_show_clocks() -> bool {
false
}
}
impl ConfigBlock for NvidiaGpu {
type Config = NvidiaGpuConfig;
fn new(block_config: Self::Config, config: Config, _tx_update_request: Sender<Task>) -> Result<Self> {
let id = Uuid::new_v4().simple().to_string();
let id_memory = Uuid::new_v4().simple().to_string();
let id_fans = Uuid::new_v4().simple().to_string();
let mut output = Command::new("nvidia-smi")
.args(
&[
"-i", &block_config.gpu_id.to_string(),
"--query-gpu=name,memory.total",
"--format=csv,noheader,nounits"
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-smi.")?
.stdout;
output.pop(); // Remove trailing newline.
let result_str = String::from_utf8(output).unwrap();
let result: Vec<&str> = result_str.split(", ").collect();
Ok(NvidiaGpu {
id: id.clone(),
id_fans: id_fans.clone(),
id_memory: id_memory.clone(),
update_interval: block_config.interval,
gpu_widget: ButtonWidget::new(config.clone(), &id).with_icon("gpu"),
gpu_name: result[0].to_string(),
gpu_name_displayed: false,
gpu_id: block_config.gpu_id,
label: block_config.label,
show_utilization: if block_config.show_utilization {
Some(TextWidget::new(config.clone())) } else {
None
},
show_memory: if block_config.show_memory {
Some(ButtonWidget::new(config.clone(), &id_memory)) } else {
None
},
memory_total: result[1].to_string(),
memory_total_displayed: false,
show_temperature: if block_config.show_temperature {
Some(TextWidget::new(config.clone())) } else {
None
},
show_fan: if block_config.show_fan_speed {
Some(ButtonWidget::new(config.clone(), &id_fans)) } else {
None
},
fan_speed: 0,
fan_speed_controlled: false,
show_clocks: if block_config.show_clocks {
Some(TextWidget::new(config.clone())) } else {
None
},
})
}
}
impl Block for NvidiaGpu {
fn update(&mut self) -> Result<Option<Duration>> {
let mut params = String::new();
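// Build the comma-separated --query-gpu field list to match whichever
// widgets are enabled; nvidia-smi returns the values in the same order,
// which the index-based parsing below relies on.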
if self.show_utilization.is_some() {
params += "utilization.gpu,";
}
if self.show_memory.is_some() {
params += "memory.used,";
}
if self.show_temperature.is_some() {
params += "temperature.gpu,";
}
if self.show_fan.is_some() {
params += "fan.speed,";
}
if self.show_clocks.is_some() {
params += "clocks.current.graphics,";
}
let mut output = Command::new("nvidia-smi")
.args(
&[
"-i", &self.gpu_id.to_string(),
&format!("--query-gpu={}", params),
"--format=csv,noheader,nounits"
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-smi.")?
.stdout;
output.pop(); // Remove trailing newline.
let result_str = String::from_utf8(output).unwrap();
// TODO: change to 'retain' once Rust 1.26 can be required.
let result: Vec<&str> = result_str.split(", ").collect();
let mut count: usize = 0;
if let Some(ref mut utilization_widget) = self.show_utilization {
utilization_widget.set_text(format!("{}%", result[count]));
count += 1;
}
if let Some(ref mut memory_widget) = self.show_memory {
if self.memory_total_displayed {
memory_widget.set_text(format!("{}MB", self.memory_total));
} else {
memory_widget.set_text(format!("{}MB", result[count]));
}
count += 1;
}
if let Some(ref mut temperature_widget) = self.show_temperature {
let temp = result[count].parse::<u64>().unwrap();
temperature_widget.set_state(match temp {
0...50 => State::Good,
51...70 => State::Idle,
71...75 => State::Info,
76...80 => State::Warning,
_ => State::Critical,
});
temperature_widget.set_text(format!("{:02}°C", temp));
count += 1;
}
if let Some(ref mut fan_widget) = self.show_fan {
self.fan_speed = result[count].parse::<u64>().unwrap();
fan_widget.set_text(format!("{:02}%", self.fan_speed));
count += 1;
}
if let Some(ref mut clocks_widget) = self.show_clocks {
clocks_widget.set_text(format!("{}MHz", result[count]));
}
if self.gpu_name_displayed {
self.gpu_widget.set_text(self.gpu_name.as_ref());
} else {
self.gpu_widget.set_text(self.label.as_ref());
}
Ok(Some(self.update_interval))
}
fn view(&self) -> Vec<&I3BarWidget> {
let mut widgets: Vec<&I3BarWidget> = Vec::new();
widgets.push(&self.gpu_widget);
if let Some(ref utilization_widget) = self.show_utilization {
widgets.push(utilization_widget);
}
if let Some(ref memory_widget) = self.show_memory {
widgets.push(memory_widget);
}
if let Some(ref temperature_widget) = self.show_temperature {
widgets.push(temperature_widget);
}
if let Some(ref fan_widget) = self.show_fan {
widgets.push(fan_widget);
}
if let Some(ref clocks_widget) = self.show_clocks {
widgets.push(clocks_widget);
}
widgets
}
fn click(&mut self, e: &I3BarEvent) -> Result<()> {
if let Some(ref name) = e.name {
let event_name = name.as_str();
if event_name == self.id {
self.gpu_name_displayed = match e.button {
MouseButton::Left => !self.gpu_name_displayed,
_ => self.gpu_name_displayed
};
if self.gpu_name_displayed {
self.gpu_widget.set_text(self.gpu_name.as_ref());
} else {
self.gpu_widget.set_text(self.label.as_ref());
}
}
if event_name == self.id_memory {
self.memory_total_displayed = match e.button {
MouseButton::Left => !self.memory_total_displayed,
_ => self.memory_total_displayed
};
if let Some(ref mut memory_widget) = self.show_memory {
if self.memory_total_displayed {
memory_widget.set_text(format!("{}MB", self.memory_total));
} else {
let mut output = Command::new("nvidia-smi")
.args(
&[
"-i", &self.gpu_id.to_string(),
"--query-gpu=memory.used",
"--format=csv,noheader,nounits"
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-smi.")?
.stdout;<|fim▁hole|> let result_str = String::from_utf8(output).unwrap();
memory_widget.set_text(format!("{}MB", result_str));
}
}
}
if event_name == self.id_fans {
let mut controlled_changed = false;
let mut new_fan_speed = self.fan_speed;
match e.button {
MouseButton::Left => {
self.fan_speed_controlled = !self.fan_speed_controlled;
controlled_changed = true;
}
MouseButton::WheelUp => {
if self.fan_speed < 100 && self.fan_speed_controlled {
new_fan_speed += 1;
}
}
MouseButton::WheelDown => {
if self.fan_speed > 0 && self.fan_speed_controlled {
new_fan_speed -= 1;
}
}
_ => {}
};
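// GPUFanControlState=1 hands fan control to the user (manual target speed
// via GPUTargetFanSpeed); 0 returns the fan to the driver's automatic curve.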
if let Some(ref mut fan_widget) = self.show_fan {
if controlled_changed {
if self.fan_speed_controlled {
Command::new("nvidia-settings")
.args(
&[
"-a",
&format!("[gpu:{}]/GPUFanControlState=1",
self.gpu_id),
"-a",
&format!("[fan:{}]/GPUTargetFanSpeed={}",
self.gpu_id,
self.fan_speed),
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-settings.")?;
fan_widget.set_text(format!("{:02}%", self.fan_speed));
fan_widget.set_state(State::Warning);
} else {
Command::new("nvidia-settings")
.args(
&[
"-a",
&format!("[gpu:{}]/GPUFanControlState=0",
self.gpu_id),
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-settings.")?;
fan_widget.set_state(State::Idle);
}
} else if self.fan_speed_controlled {
Command::new("nvidia-settings")
.args(
&[
"-a",
&format!("[fan:{}]/GPUTargetFanSpeed={}",
self.gpu_id,
new_fan_speed),
],
)
.output()
.block_error("gpu", "Failed to execute nvidia-settings.")?;
self.fan_speed = new_fan_speed;
fan_widget.set_text(format!("{:02}%", new_fan_speed));
}
}
}
}
Ok(())
}
fn id(&self) -> &str {
&self.id
}
}<|fim▁end|> | output.pop(); // Remove trailing newline. |
<|file_name|>act_tests.py<|end_file_name|><|fim▁begin|>from django.test import TestCase
from apps.taxonomy.models import Act
from apps.taxonomy.tests import factories
from apps.taxonomy.tests.base import TaxonomyBaseTestMixin
class TestActCreation(TestCase):
def setUp(self):
super(TestActCreation, self).setUp()
factories.TaxonRankFactory(id=0)
def test_creates_act_for_new_taxon(self):
taxonnode = factories.TaxonNodeFactory()
taxonnode.post_created()
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="new_taxon").count(), 1)
def test_create_edit_name_act(self):
taxonnode = factories.TaxonNodeFactory()
taxonnode.epithet = "new epithet"
taxonnode.save()
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="edit_name").count(), 1)
def test_create_change_parent_act(self):
taxonnode = TaxonomyBaseTestMixin.create_working_taxonnode()
taxonnode_new_parent = TaxonomyBaseTestMixin.create_working_taxonnode(taxonnode.tree)
taxonnode.post_changed(parent=taxonnode_new_parent)
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="change_parent").count(), 1)
def test_not_create_change_parent_act_when_did_not_change(self):
taxonnode = TaxonomyBaseTestMixin.create_working_taxonnode()
taxonnode_parent = TaxonomyBaseTestMixin.create_working_taxonnode(taxonnode.tree)
factories.EdgeFactory(ancestor=taxonnode_parent, descendant=taxonnode)
taxonnode.post_changed(parent=taxonnode_parent)
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="change_parent").count(), 0)
def test_create_change_to_synonym_act(self):
valid_name = factories.TaxonNodeFactory()
taxonnode = factories.TaxonNodeFactory(tree=valid_name.tree)
taxonnode.valid_name = valid_name
taxonnode.synonym_type = "synonym"
taxonnode.save()
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="marked_as_synonym").count(), 1)
def test_create_change_to_basionym_act(self):
valid_name = factories.TaxonNodeFactory()
taxonnode = factories.TaxonNodeFactory(tree=valid_name.tree)
taxonnode.valid_name = valid_name
taxonnode.synonym_type = "basionym"
taxonnode.save()
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="marked_as_basionym").count(), 1)<|fim▁hole|> taxonnode.nomenclatural_status = "established"
taxonnode.save()
self.assertEqual(Act.objects.filter(taxon_node=taxonnode, type="change_nomen_status").count(), 1)<|fim▁end|> |
def test_create_change_nomen_status_act(self):
taxonnode = factories.TaxonNodeFactory() |
<|file_name|>log.js<|end_file_name|><|fim▁begin|>'use strict';
const chalk = require('chalk');
function log(type) {
if (arguments.length < 2) {
return;
}
const msgs = Array.from(arguments).slice(1);
let log = ['[rise]'];
switch(type) {
case 'error':
log.push(chalk.red('ERRO'));
break;
case 'warn':
log.push(chalk.yellow('WARN'));
break;
case 'info':
log.push(chalk.blue('INFO'));<|fim▁hole|> case 'debug':
log.push(chalk.gray('DEBU'));
break;
}
log = log.concat(msgs.map(function(m) {
if (m instanceof Error) {
return m.stack;
}
return m;
}));
log.push("\n");
const msg = log.join(' ');
if (process.env.NODE_ENV === 'test') {
// Don't log in tests.
return;
}
if (type === 'error') {
process.stderr.write(msg);
} else {
process.stdout.write(msg);
}
}
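// Usage sketch (method names as exported below):
//   const log = require('./log');
//   log.info('server listening on port', 3000);
//   log.error('request failed:', new Error('boom'));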
module.exports = {
error(/* msg */) {
log.apply(this, ['error'].concat(Array.from(arguments)));
},
warn(/* msg */) {
log.apply(this, ['warn'].concat(Array.from(arguments)));
},
info(/* msg */) {
log.apply(this, ['info'].concat(Array.from(arguments)));
},
debug(/* msg */) {
log.apply(this, ['debug'].concat(Array.from(arguments)));
}
};<|fim▁end|> | break; |
<|file_name|>rules.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The html5ever Project Developers. See the
// COPYRIGHT file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! The tree builder rules, as a single, enormous nested match expression.
use core::prelude::*;
use tree_builder::types::*;
use tree_builder::tag_sets::*;
use tree_builder::actions::TreeBuilderActions;
use tree_builder::interface::{TreeSink, Quirks, AppendNode};
use tokenizer::{Tag, StartTag, EndTag};
use tokenizer::states::{Rcdata, Rawtext, ScriptData, Plaintext};
use util::str::is_ascii_whitespace;
use core::mem::replace;
use collections::MutableSeq;
use collections::string::String;
use collections::str::Slice;
use string_cache::Atom;
fn any_not_whitespace(x: &String) -> bool {
// FIXME: this might be much faster as a byte scan
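// (A byte scan would be safe here: ASCII whitespace is single-byte and
// UTF-8 continuation bytes are all >= 0x80, so scanning raw bytes cannot
// match inside a multi-byte character.)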
x.as_slice().chars().any(|c| !is_ascii_whitespace(c))
}
// This goes in a trait so that we can control visibility.
pub trait TreeBuilderStep<Handle> {
fn step(&mut self, mode: InsertionMode, token: Token) -> ProcessResult;
}
#[doc(hidden)]
impl<'sink, Handle: Clone, Sink: TreeSink<Handle>>
TreeBuilderStep<Handle> for super::TreeBuilder<'sink, Handle, Sink> {
fn step(&mut self, mode: InsertionMode, token: Token) -> ProcessResult {
self.debug_step(mode, &token);
match mode {
//§ the-initial-insertion-mode
Initial => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => Done,
CommentToken(text) => self.append_comment_to_doc(text),
token => {
if !self.opts.iframe_srcdoc {
self.unexpected(&token);
self.set_quirks_mode(Quirks);
}
Reprocess(BeforeHtml, token)
}
}),
<|fim▁hole|> CommentToken(text) => self.append_comment_to_doc(text),
tag @ <html> => {
self.create_root(tag.attrs);
self.mode = BeforeHead;
Done
}
</head> </body> </html> </br> => else,
tag @ </_> => self.unexpected(&tag),
token => {
self.create_root(vec!());
Reprocess(BeforeHead, token)
}
}),
//§ the-before-head-insertion-mode
BeforeHead => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => Done,
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
tag @ <head> => {
self.head_elem = Some(self.insert_element_for(tag));
self.mode = InHead;
Done
}
</head> </body> </html> </br> => else,
tag @ </_> => self.unexpected(&tag),
token => {
self.head_elem = Some(self.insert_phantom(atom!(head)));
Reprocess(InHead, token)
}
}),
//§ parsing-main-inhead
InHead => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, text) => self.append_text(text),
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
tag @ <base> <basefont> <bgsound> <link> <meta> => {
// FIXME: handle <meta charset=...> and <meta http-equiv="Content-Type">
self.insert_and_pop_element_for(tag);
DoneAckSelfClosing
}
tag @ <title> => {
self.parse_raw_data(tag, Rcdata);
Done
}
tag @ <noframes> <style> <noscript> => {
if (!self.opts.scripting_enabled) && (tag.name == atom!(noscript)) {
self.insert_element_for(tag);
self.mode = InHeadNoscript;
} else {
self.parse_raw_data(tag, Rawtext);
}
Done
}
tag @ <script> => {
let elem = self.sink.create_element(ns!(HTML), atom!(script), tag.attrs);
if self.opts.fragment {
self.sink.mark_script_already_started(elem.clone());
}
self.insert_appropriately(AppendNode(elem.clone()));
self.open_elems.push(elem);
self.to_raw_text_mode(ScriptData);
Done
}
</head> => {
self.pop();
self.mode = AfterHead;
Done
}
</body> </html> </br> => else,
<template> => fail!("FIXME: <template> not implemented"),
</template> => fail!("FIXME: <template> not implemented"),
<head> => self.unexpected(&token),
tag @ </_> => self.unexpected(&tag),
token => {
self.pop();
Reprocess(AfterHead, token)
}
}),
//§ parsing-main-inheadnoscript
InHeadNoscript => match_token!(token {
<html> => self.step(InBody, token),
</noscript> => {
self.pop();
self.mode = InHead;
Done
},
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => self.step(InHead, token),
CommentToken(_) => self.step(InHead, token),
<basefont> <bgsound> <link> <meta> <noframes> <style>
=> self.step(InHead, token),
</br> => else,
<head> <noscript> => self.unexpected(&token),
tag @ </_> => self.unexpected(&tag),
token => {
self.unexpected(&token);
self.pop();
Reprocess(InHead, token)
},
}),
//§ the-after-head-insertion-mode
AfterHead => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, text) => self.append_text(text),
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
tag @ <body> => {
self.insert_element_for(tag);
self.frameset_ok = false;
self.mode = InBody;
Done
}
tag @ <frameset> => {
self.insert_element_for(tag);
self.mode = InFrameset;
Done
}
<base> <basefont> <bgsound> <link> <meta>
<noframes> <script> <style> <template> <title> => {
self.unexpected(&token);
let head = self.head_elem.as_ref().expect("no head element").clone();
self.push(&head);
let result = self.step(InHead, token);
self.remove_from_stack(&head);
result
}
</template> => self.step(InHead, token),
</body> </html> </br> => else,
<head> => self.unexpected(&token),
tag @ </_> => self.unexpected(&tag),
token => {
self.insert_phantom(atom!(body));
Reprocess(InBody, token)
}
}),
//§ parsing-main-inbody
InBody => match_token!(token {
NullCharacterToken => self.unexpected(&token),
CharacterTokens(_, text) => {
self.reconstruct_formatting();
if any_not_whitespace(&text) {
self.frameset_ok = false;
}
self.append_text(text)
}
CommentToken(text) => self.append_comment(text),
tag @ <html> => {
self.unexpected(&tag);
// FIXME: <template>
let top = self.html_elem();
self.sink.add_attrs_if_missing(top, tag.attrs);
Done
}
<base> <basefont> <bgsound> <link> <meta> <noframes>
<script> <style> <template> <title> </template> => {
self.step(InHead, token)
}
tag @ <body> => {
self.unexpected(&tag);
// FIXME: <template>
match self.body_elem() {
None => (),
Some(node) => {
self.frameset_ok = false;
self.sink.add_attrs_if_missing(node, tag.attrs)
}
}
Done
}
tag @ <frameset> => {
self.unexpected(&tag);
if !self.frameset_ok { return Done; }
// Can't use unwrap_or_return!() due to rust-lang/rust#16617.
let body = match self.body_elem() {
None => return Done,
Some(x) => x,
};
self.sink.remove_from_parent(body);
// FIXME: can we get here in the fragment case?
// What to do with the first element then?
self.open_elems.truncate(1);
self.insert_element_for(tag);
self.mode = InFrameset;
Done
}
EOFToken => {
// FIXME: <template>
self.check_body_end();
self.stop_parsing()
}
</body> => {
if self.in_scope_named(default_scope, atom!(body)) {
self.check_body_end();
self.mode = AfterBody;
} else {
self.sink.parse_error(Slice("</body> with no <body> in scope"));
}
Done
}
</html> => {
if self.in_scope_named(default_scope, atom!(body)) {
self.check_body_end();
Reprocess(AfterBody, token)
} else {
self.sink.parse_error(Slice("</html> with no <body> in scope"));
Done
}
}
tag @ <address> <article> <aside> <blockquote> <center> <details> <dialog>
<dir> <div> <dl> <fieldset> <figcaption> <figure> <footer> <header>
<hgroup> <main> <menu> <nav> <ol> <p> <section> <summary> <ul> => {
self.close_p_element_in_button_scope();
self.insert_element_for(tag);
Done
}
tag @ <h1> <h2> <h3> <h4> <h5> <h6> => {
self.close_p_element_in_button_scope();
if self.current_node_in(heading_tag) {
self.sink.parse_error(Slice("nested heading tags"));
self.pop();
}
self.insert_element_for(tag);
Done
}
tag @ <pre> <listing> => {
self.close_p_element_in_button_scope();
self.insert_element_for(tag);
self.ignore_lf = true;
self.frameset_ok = false;
Done
}
tag @ <form> => {
// FIXME: <template>
if self.form_elem.is_some() {
self.sink.parse_error(Slice("nested forms"));
} else {
self.close_p_element_in_button_scope();
let elem = self.insert_element_for(tag);
// FIXME: <template>
self.form_elem = Some(elem);
}
Done
}
tag @ <li> <dd> <dt> => {
declare_tag_set!(close_list = li)
declare_tag_set!(close_defn = dd dt)
declare_tag_set!(extra_special = special_tag - address div p)
let can_close = match tag.name {
atom!(li) => close_list,
atom!(dd) | atom!(dt) => close_defn,
_ => unreachable!(),
};
self.frameset_ok = false;
let mut to_close = None;
for node in self.open_elems.iter().rev() {
let nsname = self.sink.elem_name(node.clone());
if can_close(nsname.clone()) {
let (_, name) = nsname;
to_close = Some(name);
break;
}
if extra_special(nsname.clone()) {
break;
}
}
match to_close {
Some(name) => {
self.generate_implied_end_except(name.clone());
self.expect_to_close(name);
}
None => (),
}
self.close_p_element_in_button_scope();
self.insert_element_for(tag);
Done
}
tag @ <plaintext> => {
self.close_p_element_in_button_scope();
self.insert_element_for(tag);
self.next_tokenizer_state = Some(Plaintext);
Done
}
tag @ <button> => {
if self.in_scope_named(default_scope, atom!(button)) {
self.sink.parse_error(Slice("nested buttons"));
self.generate_implied_end(cursory_implied_end);
self.pop_until_named(atom!(button));
}
self.reconstruct_formatting();
self.insert_element_for(tag);
self.frameset_ok = false;
Done
}
tag @ </address> </article> </aside> </blockquote> </button> </center>
</details> </dialog> </dir> </div> </dl> </fieldset> </figcaption>
</figure> </footer> </header> </hgroup> </listing> </main> </menu>
</nav> </ol> </pre> </section> </summary> </ul> => {
if !self.in_scope_named(default_scope, tag.name.clone()) {
self.unexpected(&tag);
} else {
self.generate_implied_end(cursory_implied_end);
self.expect_to_close(tag.name);
}
Done
}
</form> => {
// FIXME: <template>
// Can't use unwrap_or_return!() due to rust-lang/rust#16617.
let node = match self.form_elem.take() {
None => {
self.sink.parse_error(Slice("Null form element pointer on </form>"));
return Done;
}
Some(x) => x,
};
if !self.in_scope(default_scope,
|n| self.sink.same_node(node.clone(), n)) {
self.sink.parse_error(Slice("Form element not in scope on </form>"));
return Done;
}
self.generate_implied_end(cursory_implied_end);
let current = self.current_node();
self.remove_from_stack(&node);
if !self.sink.same_node(current, node) {
self.sink.parse_error(Slice("Bad open element on </form>"));
}
Done
}
</p> => {
if !self.in_scope_named(button_scope, atom!(p)) {
self.sink.parse_error(Slice("No <p> tag to close"));
self.insert_phantom(atom!(p));
}
self.close_p_element();
Done
}
tag @ </li> </dd> </dt> => {
let scope = match tag.name {
atom!(li) => list_item_scope,
_ => default_scope,
};
if self.in_scope_named(|x| scope(x), tag.name.clone()) {
self.generate_implied_end_except(tag.name.clone());
self.expect_to_close(tag.name);
} else {
self.sink.parse_error(Slice("No matching tag to close"));
}
Done
}
tag @ </h1> </h2> </h3> </h4> </h5> </h6> => {
if self.in_scope(default_scope, |n| self.elem_in(n.clone(), heading_tag)) {
self.generate_implied_end(cursory_implied_end);
if !self.current_node_named(tag.name) {
self.sink.parse_error(Slice("Closing wrong heading tag"));
}
self.pop_until(heading_tag);
} else {
self.sink.parse_error(Slice("No heading tag to close"));
}
Done
}
tag @ <a> => {
let mut to_remove = vec!();
for (i, handle, _) in self.active_formatting_end_to_marker() {
if self.html_elem_named(handle.clone(), atom!(a)) {
to_remove.push((i, handle.clone()));
}
}
if !to_remove.is_empty() {
self.unexpected(&tag);
self.adoption_agency(atom!(a));
// FIXME: quadratic time
for (i, handle) in to_remove.into_iter() {
self.remove_from_stack(&handle);
self.active_formatting.remove(i);
// We iterated backwards from the end above, so
// we don't need to adjust the indices after each
// removal.
}
}
self.reconstruct_formatting();
self.create_formatting_element_for(tag);
Done
}
tag @ <b> <big> <code> <em> <font> <i> <s> <small> <strike> <strong> <tt> <u> => {
self.reconstruct_formatting();
self.create_formatting_element_for(tag);
Done
}
tag @ <nobr> => {
self.reconstruct_formatting();
if self.in_scope_named(default_scope, atom!(nobr)) {
self.sink.parse_error(Slice("Nested <nobr>"));
self.adoption_agency(atom!(nobr));
self.reconstruct_formatting();
}
self.create_formatting_element_for(tag);
Done
}
tag @ </a> </b> </big> </code> </em> </font> </i> </nobr>
</s> </small> </strike> </strong> </tt> </u> => {
self.adoption_agency(tag.name);
Done
}
tag @ <applet> <marquee> <object> => {
self.reconstruct_formatting();
self.insert_element_for(tag);
self.active_formatting.push(Marker);
self.frameset_ok = false;
Done
}
tag @ </applet> </marquee> </object> => {
if !self.in_scope_named(default_scope, tag.name.clone()) {
self.unexpected(&tag);
} else {
self.generate_implied_end(cursory_implied_end);
self.expect_to_close(tag.name);
self.clear_active_formatting_to_marker();
}
Done
}
tag @ <table> => {
if self.quirks_mode != Quirks {
self.close_p_element_in_button_scope();
}
self.insert_element_for(tag);
self.frameset_ok = false;
self.mode = InTable;
Done
}
tag @ </br> => {
self.unexpected(&tag);
self.step(InBody, TagToken(Tag {
kind: StartTag,
attrs: vec!(),
..tag
}))
}
tag @ <area> <br> <embed> <img> <keygen> <wbr> <input> => {
let keep_frameset_ok = match tag.name {
atom!(input) => self.is_type_hidden(&tag),
_ => false,
};
self.reconstruct_formatting();
self.insert_and_pop_element_for(tag);
if !keep_frameset_ok {
self.frameset_ok = false;
}
DoneAckSelfClosing
}
tag @ <menuitem> <param> <source> <track> => {
self.insert_and_pop_element_for(tag);
DoneAckSelfClosing
}
tag @ <hr> => {
self.close_p_element_in_button_scope();
self.insert_and_pop_element_for(tag);
self.frameset_ok = false;
DoneAckSelfClosing
}
tag @ <image> => {
self.unexpected(&tag);
self.step(InBody, TagToken(Tag {
name: atom!(img),
..tag
}))
}
<isindex> => fail!("FIXME: <isindex> not implemented"),
tag @ <textarea> => {
self.ignore_lf = true;
self.frameset_ok = false;
self.parse_raw_data(tag, Rcdata);
Done
}
tag @ <xmp> => {
self.close_p_element_in_button_scope();
self.reconstruct_formatting();
self.frameset_ok = false;
self.parse_raw_data(tag, Rawtext);
Done
}
tag @ <iframe> => {
self.frameset_ok = false;
self.parse_raw_data(tag, Rawtext);
Done
}
tag @ <noembed> => {
self.parse_raw_data(tag, Rawtext);
Done
}
// <noscript> handled in wildcard case below
tag @ <select> => {
self.reconstruct_formatting();
self.insert_element_for(tag);
self.frameset_ok = false;
// NB: mode == InBody but possibly self.mode != mode, if
// we're processing "as in the rules for InBody".
self.mode = match self.mode {
InTable | InCaption | InTableBody
| InRow | InCell => InSelectInTable,
_ => InSelect,
};
Done
}
tag @ <optgroup> <option> => {
if self.current_node_named(atom!(option)) {
self.pop();
}
self.reconstruct_formatting();
self.insert_element_for(tag);
Done
}
tag @ <rp> <rt> => {
if self.in_scope_named(default_scope, atom!(ruby)) {
self.generate_implied_end(cursory_implied_end);
}
if !self.current_node_named(atom!(ruby)) {
self.unexpected(&tag);
}
self.insert_element_for(tag);
Done
}
<math> => fail!("FIXME: MathML not implemented"),
<svg> => fail!("FIXME: SVG not implemented"),
<caption> <col> <colgroup> <frame> <head>
<tbody> <td> <tfoot> <th> <thead> <tr> => {
self.unexpected(&token);
Done
}
tag @ <_> => {
if self.opts.scripting_enabled && tag.name == atom!(noscript) {
self.parse_raw_data(tag, Rawtext);
} else {
self.reconstruct_formatting();
self.insert_element_for(tag);
}
Done
}
tag @ </_> => {
// Look back for a matching open element.
let mut match_idx = None;
for (i, elem) in self.open_elems.iter().enumerate().rev() {
if self.html_elem_named(elem.clone(), tag.name.clone()) {
match_idx = Some(i);
break;
}
if self.elem_in(elem.clone(), special_tag) {
self.sink.parse_error(Slice("Found special tag while closing generic tag"));
return Done;
}
}
// Can't use unwrap_or_return!() due to rust-lang/rust#16617.
let match_idx = match match_idx {
None => {
// I believe this is impossible, because the root
// <html> element is in special_tag.
self.unexpected(&tag);
return Done;
}
Some(x) => x,
};
self.generate_implied_end_except(tag.name.clone());
if match_idx != self.open_elems.len() - 1 {
// mis-nested tags
self.unexpected(&tag);
}
self.open_elems.truncate(match_idx);
Done
}
// FIXME: This should be unreachable, but match_token! requires a
// catch-all case.
_ => fail!("impossible case in InBody mode"),
}),
//§ parsing-main-incdata
Text => match_token!(token {
CharacterTokens(_, text) => self.append_text(text),
EOFToken => {
self.unexpected(&token);
if self.current_node_named(atom!(script)) {
let current = self.current_node();
self.sink.mark_script_already_started(current);
}
self.pop();
Reprocess(self.orig_mode.take_unwrap(), token)
}
tag @ </_> => {
if tag.name == atom!(script) {
h5e_warn!("FIXME: </script> not implemented");
}
self.pop();
self.mode = self.orig_mode.take_unwrap();
Done
}
// The spec doesn't say what to do here.
// Other tokens are impossible?
_ => fail!("impossible case in Text mode"),
}),
//§ parsing-main-intable
InTable => match_token!(token {
// FIXME: hack, should implement pat | pat for match_token!() instead
NullCharacterToken => self.process_chars_in_table(token),
CharacterTokens(..) => self.process_chars_in_table(token),
CommentToken(text) => self.append_comment(text),
tag @ <caption> => {
self.pop_until_current(table_scope);
self.active_formatting.push(Marker);
self.insert_element_for(tag);
self.mode = InCaption;
Done
}
tag @ <colgroup> => {
self.pop_until_current(table_scope);
self.insert_element_for(tag);
self.mode = InColumnGroup;
Done
}
<col> => {
self.pop_until_current(table_scope);
self.insert_phantom(atom!(colgroup));
Reprocess(InColumnGroup, token)
}
tag @ <tbody> <tfoot> <thead> => {
self.pop_until_current(table_scope);
self.insert_element_for(tag);
self.mode = InTableBody;
Done
}
<td> <th> <tr> => {
self.pop_until_current(table_scope);
self.insert_phantom(atom!(tbody));
Reprocess(InTableBody, token)
}
<table> => {
self.unexpected(&token);
if self.in_scope_named(table_scope, atom!(table)) {
self.pop_until_named(atom!(table));
Reprocess(self.reset_insertion_mode(), token)
} else {
Done
}
}
</table> => {
if self.in_scope_named(table_scope, atom!(table)) {
self.pop_until_named(atom!(table));
self.mode = self.reset_insertion_mode();
} else {
self.unexpected(&token);
}
Done
}
</body> </caption> </col> </colgroup> </html>
</tbody> </td> </tfoot> </th> </thead> </tr> =>
self.unexpected(&token),
<style> <script> <template> </template>
=> self.step(InHead, token),
tag @ <input> => {
self.unexpected(&tag);
if self.is_type_hidden(&tag) {
self.insert_and_pop_element_for(tag);
DoneAckSelfClosing
} else {
self.foster_parent_in_body(TagToken(tag))
}
}
tag @ <form> => {
self.unexpected(&tag);
// FIXME: <template>
if self.form_elem.is_none() {
self.form_elem = Some(self.insert_and_pop_element_for(tag));
}
Done
}
EOFToken => self.step(InBody, token),
token => {
self.unexpected(&token);
self.foster_parent_in_body(token)
}
}),
//§ parsing-main-intabletext
InTableText => match_token!(token {
NullCharacterToken => self.unexpected(&token),
CharacterTokens(split, text) => {
self.pending_table_text.push((split, text));
Done
}
token => {
let pending = replace(&mut self.pending_table_text, vec!());
let contains_nonspace = pending.iter().any(|&(split, ref text)| {
match split {
Whitespace => false,
NotWhitespace => true,
NotSplit => any_not_whitespace(text),
}
});
if contains_nonspace {
self.sink.parse_error(Slice("Non-space table text"));
for (split, text) in pending.into_iter() {
match self.foster_parent_in_body(CharacterTokens(split, text)) {
Done => (),
_ => fail!("not prepared to handle this!"),
}
}
} else {
for (_, text) in pending.into_iter() {
self.append_text(text);
}
}
Reprocess(self.orig_mode.take_unwrap(), token)
}
}),
//§ parsing-main-incaption
InCaption => match_token!(token {
tag @ <caption> <col> <colgroup> <tbody> <td> <tfoot>
<th> <thead> <tr> </table> </caption> => {
if self.in_scope_named(table_scope, atom!(caption)) {
self.generate_implied_end(cursory_implied_end);
self.expect_to_close(atom!(caption));
self.clear_active_formatting_to_marker();
match tag {
Tag { kind: EndTag, name: atom!(caption), .. } => {
self.mode = InTable;
Done
}
_ => Reprocess(InTable, TagToken(tag))
}
} else {
self.unexpected(&tag);
Done
}
}
</body> </col> </colgroup> </html> </tbody>
</td> </tfoot> </th> </thead> </tr> => self.unexpected(&token),
token => self.step(InBody, token),
}),
//§ parsing-main-incolgroup
InColumnGroup => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, text) => self.append_text(text),
CommentToken(text) => self.append_comment_to_html(text),
<html> => self.step(InBody, token),
tag @ <col> => {
self.insert_and_pop_element_for(tag);
DoneAckSelfClosing
}
</colgroup> => {
if self.current_node_named(atom!(colgroup)) {
self.pop();
self.mode = InTable;
} else {
self.unexpected(&token);
}
Done
}
</col> => self.unexpected(&token),
<template> </template> => self.step(InHead, token),
EOFToken => self.step(InBody, token),
token => {
if self.current_node_named(atom!(colgroup)) {
self.pop();
} else {
self.unexpected(&token);
}
Reprocess(InTable, token)
}
}),
//§ parsing-main-intbody
InTableBody => match_token!(token {
tag @ <tr> => {
self.pop_until_current(table_body_context);
self.insert_element_for(tag);
self.mode = InRow;
Done
}
<th> <td> => {
self.unexpected(&token);
self.pop_until_current(table_body_context);
self.insert_phantom(atom!(tr));
Reprocess(InRow, token)
}
tag @ </tbody> </tfoot> </thead> => {
if self.in_scope_named(table_scope, tag.name.clone()) {
self.pop_until_current(table_body_context);
self.pop();
self.mode = InTable;
} else {
self.unexpected(&tag);
}
Done
}
<caption> <col> <colgroup> <tbody> <tfoot> <thead> </table> => {
declare_tag_set!(table_outer = table tbody tfoot)
if self.in_scope(table_scope, |e| self.elem_in(e, table_outer)) {
self.pop_until_current(table_body_context);
self.pop();
Reprocess(InTable, token)
} else {
self.unexpected(&token)
}
}
</body> </caption> </col> </colgroup> </html> </td> </th> </tr>
=> self.unexpected(&token),
token => self.step(InTable, token),
}),
//§ parsing-main-intr
InRow => match_token!(token {
tag @ <th> <td> => {
self.pop_until_current(table_row_context);
self.insert_element_for(tag);
self.mode = InCell;
self.active_formatting.push(Marker);
Done
}
</tr> => {
if self.in_scope_named(table_scope, atom!(tr)) {
self.pop_until_current(table_row_context);
let node = self.pop();
self.assert_named(node, atom!(tr));
self.mode = InTableBody;
} else {
self.unexpected(&token);
}
Done
}
<caption> <col> <colgroup> <tbody> <tfoot> <thead> <tr> </table> => {
if self.in_scope_named(table_scope, atom!(tr)) {
self.pop_until_current(table_row_context);
let node = self.pop();
self.assert_named(node, atom!(tr));
Reprocess(InTableBody, token)
} else {
self.unexpected(&token)
}
}
tag @ </tbody> </tfoot> </thead> => {
if self.in_scope_named(table_scope, tag.name.clone()) {
if self.in_scope_named(table_scope, atom!(tr)) {
self.pop_until_current(table_row_context);
let node = self.pop();
self.assert_named(node, atom!(tr));
Reprocess(InTableBody, TagToken(tag))
} else {
Done
}
} else {
self.unexpected(&tag)
}
}
</body> </caption> </col> </colgroup> </html> </td> </th>
=> self.unexpected(&token),
token => self.step(InTable, token),
}),
//§ parsing-main-intd
InCell => match_token!(token {
tag @ </td> </th> => {
if self.in_scope_named(table_scope, tag.name.clone()) {
self.generate_implied_end(cursory_implied_end);
self.expect_to_close(tag.name);
self.clear_active_formatting_to_marker();
self.mode = InRow;
} else {
self.unexpected(&tag);
}
Done
}
<caption> <col> <colgroup> <tbody> <td> <tfoot> <th> <thead> <tr> => {
if self.in_scope(table_scope, |n| self.elem_in(n.clone(), td_th)) {
self.close_the_cell();
Reprocess(InRow, token)
} else {
self.unexpected(&token)
}
}
</body> </caption> </col> </colgroup> </html>
=> self.unexpected(&token),
tag @ </table> </tbody> </tfoot> </thead> </tr> => {
if self.in_scope_named(table_scope, tag.name.clone()) {
self.close_the_cell();
Reprocess(InRow, TagToken(tag))
} else {
self.unexpected(&tag)
}
}
token => self.step(InBody, token),
}),
//§ parsing-main-inselect
InSelect => match_token!(token {
NullCharacterToken => self.unexpected(&token),
CharacterTokens(_, text) => self.append_text(text),
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
tag @ <option> => {
if self.current_node_named(atom!(option)) {
self.pop();
}
self.insert_element_for(tag);
Done
}
tag @ <optgroup> => {
if self.current_node_named(atom!(option)) {
self.pop();
}
if self.current_node_named(atom!(optgroup)) {
self.pop();
}
self.insert_element_for(tag);
Done
}
</optgroup> => {
if self.open_elems.len() >= 2
&& self.current_node_named(atom!(option))
&& self.html_elem_named(self.open_elems.get(1).clone(),
atom!(optgroup)) {
self.pop();
}
if self.current_node_named(atom!(optgroup)) {
self.pop();
} else {
self.unexpected(&token);
}
Done
}
</option> => {
if self.current_node_named(atom!(option)) {
self.pop();
} else {
self.unexpected(&token);
}
Done
}
tag @ <select> </select> => {
let in_scope = self.in_scope_named(select_scope, atom!(select));
if !in_scope || tag.kind == StartTag {
self.unexpected(&tag);
}
if in_scope {
self.pop_until_named(atom!(select));
self.mode = self.reset_insertion_mode();
}
Done
}
<input> <keygen> <textarea> => {
self.unexpected(&token);
if self.in_scope_named(select_scope, atom!(select)) {
self.pop_until_named(atom!(select));
Reprocess(self.reset_insertion_mode(), token)
} else {
Done
}
}
<script> <template> </template> => self.step(InHead, token),
EOFToken => self.step(InBody, token),
token => self.unexpected(&token),
}),
//§ parsing-main-inselectintable
InSelectInTable => match_token!(token {
<caption> <table> <tbody> <tfoot> <thead> <tr> <td> <th> => {
self.unexpected(&token);
self.pop_until_named(atom!(select));
Reprocess(self.reset_insertion_mode(), token)
}
tag @ </caption> </table> </tbody> </tfoot> </thead> </tr> </td> </th> => {
self.unexpected(&tag);
if self.in_scope_named(table_scope, tag.name.clone()) {
self.pop_until_named(atom!(select));
Reprocess(self.reset_insertion_mode(), TagToken(tag))
} else {
Done
}
}
token => self.step(InSelect, token),
}),
//§ parsing-main-intemplate
InTemplate
=> fail!("FIXME: <template> not implemented"),
//§ parsing-main-afterbody
AfterBody => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => self.step(InBody, token),
CommentToken(text) => self.append_comment_to_html(text),
<html> => self.step(InBody, token),
</html> => {
if self.opts.fragment {
self.unexpected(&token);
} else {
self.mode = AfterAfterBody;
}
Done
}
EOFToken => self.stop_parsing(),
token => {
self.unexpected(&token);
Reprocess(InBody, token)
}
}),
//§ parsing-main-inframeset
InFrameset => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, text) => self.append_text(text),
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
tag @ <frameset> => {
self.insert_element_for(tag);
Done
}
</frameset> => {
if self.open_elems.len() == 1 {
self.unexpected(&token);
} else {
self.pop();
if !self.opts.fragment && !self.current_node_named(atom!(frameset)) {
self.mode = AfterFrameset;
}
}
Done
}
tag @ <frame> => {
self.insert_and_pop_element_for(tag);
DoneAckSelfClosing
}
<noframes> => self.step(InHead, token),
EOFToken => {
if self.open_elems.len() != 1 {
self.unexpected(&token);
}
self.stop_parsing()
}
token => self.unexpected(&token),
}),
//§ parsing-main-afterframeset
AfterFrameset => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, text) => self.append_text(text),
CommentToken(text) => self.append_comment(text),
<html> => self.step(InBody, token),
</html> => {
self.mode = AfterAfterFrameset;
Done
}
<noframes> => self.step(InHead, token),
EOFToken => self.stop_parsing(),
token => self.unexpected(&token),
}),
//§ the-after-after-body-insertion-mode
AfterAfterBody => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => self.step(InBody, token),
CommentToken(text) => self.append_comment_to_doc(text),
<html> => self.step(InBody, token),
EOFToken => self.stop_parsing(),
token => {
self.unexpected(&token);
Reprocess(InBody, token)
}
}),
//§ the-after-after-frameset-insertion-mode
AfterAfterFrameset => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => self.step(InBody, token),
CommentToken(text) => self.append_comment_to_doc(text),
<html> => self.step(InBody, token),
EOFToken => self.stop_parsing(),
<noframes> => self.step(InHead, token),
token => self.unexpected(&token),
}),
//§ END
}
}
}<|fim▁end|> | //§ the-before-html-insertion-mode
BeforeHtml => match_token!(token {
CharacterTokens(NotSplit, text) => SplitWhitespace(text),
CharacterTokens(Whitespace, _) => Done, |
<|file_name|>kube_features.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package features
import (
apiextensionsfeatures "k8s.io/apiextensions-apiserver/pkg/features"
genericfeatures "k8s.io/apiserver/pkg/features"
utilfeature "k8s.io/apiserver/pkg/util/feature"
)
const (
// Every feature gate should be added here following this template:
//
// // owner: @username
// // alpha: v1.X
// MyFeature utilfeature.Feature = "MyFeature"
// owner: @tallclair
// beta: v1.4
AppArmor utilfeature.Feature = "AppArmor"
// owner: @mtaufen
// alpha: v1.4
// beta: v1.11
DynamicKubeletConfig utilfeature.Feature = "DynamicKubeletConfig"
// owner: @pweil-
// alpha: v1.5
//
// Default userns=host for containers that are using other host namespaces, host mounts, the pod
// contains a privileged container, or specific non-namespaced capabilities (MKNOD, SYS_MODULE,
// SYS_TIME). This should only be enabled if user namespace remapping is enabled in the docker daemon.
ExperimentalHostUserNamespaceDefaultingGate utilfeature.Feature = "ExperimentalHostUserNamespaceDefaulting"
// owner: @vishh
// alpha: v1.5
//
// DEPRECATED - This feature is deprecated by Pod Priority and Preemption as of Kubernetes 1.13.
// Ensures guaranteed scheduling of pods marked with a special pod annotation `scheduler.alpha.kubernetes.io/critical-pod`
// and also prevents them from being evicted from a node.
// Note: This feature is not supported for `BestEffort` pods.
ExperimentalCriticalPodAnnotation utilfeature.Feature = "ExperimentalCriticalPodAnnotation"
// owner: @jiayingz
// beta: v1.10
//
// Enables support for Device Plugins
DevicePlugins utilfeature.Feature = "DevicePlugins"
// owner: @Huang-Wei
// beta: v1.13
//
// Changes the logic behind evicting Pods from not ready Nodes
// to take advantage of NoExecute Taints and Tolerations.
TaintBasedEvictions utilfeature.Feature = "TaintBasedEvictions"
// owner: @mikedanese
// alpha: v1.7
// beta: v1.12
//
// Gets a server certificate for the kubelet from the Certificate Signing
// Request API instead of generating one self signed and auto rotates the
// certificate as expiration approaches.
RotateKubeletServerCertificate utilfeature.Feature = "RotateKubeletServerCertificate"
// owner: @mikedanese
// beta: v1.8
//
// Automatically renews the client certificate used for communicating with
// the API server as the certificate approaches expiration.
RotateKubeletClientCertificate utilfeature.Feature = "RotateKubeletClientCertificate"
// owner: @msau42
// alpha: v1.7
//
// A new volume type that supports local disks on a node.
PersistentLocalVolumes utilfeature.Feature = "PersistentLocalVolumes"
// owner: @jinxu
// beta: v1.10
//
// New local storage types to support local storage capacity isolation
LocalStorageCapacityIsolation utilfeature.Feature = "LocalStorageCapacityIsolation"
// owner: @gnufied
// beta: v1.11
// Ability to Expand persistent volumes
ExpandPersistentVolumes utilfeature.Feature = "ExpandPersistentVolumes"
// owner: @mlmhl
// alpha: v1.11
// Ability to expand persistent volumes' file system without unmounting volumes.
ExpandInUsePersistentVolumes utilfeature.Feature = "ExpandInUsePersistentVolumes"
// owner: @verb
// alpha: v1.10
//
// Allows running a "debug container" in a pod namespaces to troubleshoot a running pod.
DebugContainers utilfeature.Feature = "DebugContainers"
// owner: @verb
// beta: v1.12
//
// Allows all containers in a pod to share a process namespace.
PodShareProcessNamespace utilfeature.Feature = "PodShareProcessNamespace"
// owner: @bsalamat
// alpha: v1.8
//
// Add priority to pods. Priority affects scheduling and preemption of pods.
PodPriority utilfeature.Feature = "PodPriority"
// owner: @k82cn
// beta: v1.12
//
// Taint nodes based on their condition status for 'NetworkUnavailable',
// 'MemoryPressure', 'OutOfDisk' and 'DiskPressure'.
TaintNodesByCondition utilfeature.Feature = "TaintNodesByCondition"
// owner: @jsafrane
// GA: v1.12
//
// Note: This feature gate is unconditionally enabled in v1.13 and will be removed in v1.14.
// Enable mount propagation of volumes.
MountPropagation utilfeature.Feature = "MountPropagation"
// owner: @sjenning
// alpha: v1.11
//
// Allows resource reservations at the QoS level preventing pods at lower QoS levels from
// bursting into resources requested at higher QoS levels (memory only for now)
QOSReserved utilfeature.Feature = "QOSReserved"
// owner: @ConnorDoyle
// alpha: v1.8
//
// Alternative container-level CPU affinity policies.
CPUManager utilfeature.Feature = "CPUManager"
// owner: @szuecs
// alpha: v1.12
//
// Enable nodes to change CPUCFSQuotaPeriod
CPUCFSQuotaPeriod utilfeature.Feature = "CustomCPUCFSQuotaPeriod"
// owner: @derekwaynecarr
// beta: v1.10
//
// Enable pods to consume pre-allocated huge pages of varying page sizes
HugePages utilfeature.Feature = "HugePages"
// owner: @sjenning
// beta: v1.11
//
// Enable pods to set sysctls on a pod
Sysctls utilfeature.Feature = "Sysctls"
// owner @brendandburns
// alpha: v1.9
//
// Enable nodes to exclude themselves from service load balancers
ServiceNodeExclusion utilfeature.Feature = "ServiceNodeExclusion"
// owner: @jsafrane
// alpha: v1.9
//
// Enable running mount utilities in containers.
MountContainers utilfeature.Feature = "MountContainers"
// owner: @msau42
// GA: v1.13
//
// Extend the default scheduler to be aware of PV topology and handle PV binding
VolumeScheduling utilfeature.Feature = "VolumeScheduling"
// owner: @vladimirvivien
// GA: v1.13
//
// Enable mount/attachment of Container Storage Interface (CSI) backed PVs
CSIPersistentVolume utilfeature.Feature = "CSIPersistentVolume"
// owner: @saad-ali
// alpha: v1.12
// Enable all logic related to the CSIDriver API object in csi.storage.k8s.io
CSIDriverRegistry utilfeature.Feature = "CSIDriverRegistry"
// owner: @verult
// alpha: v1.12
// Enable all logic related to the CSINodeInfo API object in csi.storage.k8s.io
CSINodeInfo utilfeature.Feature = "CSINodeInfo"
// owner: @MrHohn
// beta: v1.10
//
// Support configurable pod DNS parameters.
CustomPodDNS utilfeature.Feature = "CustomPodDNS"
// owner: @screeley44
// alpha: v1.9
// beta: v1.13
//
// Enable Block volume support in containers.
BlockVolume utilfeature.Feature = "BlockVolume"
// owner: @pospispa
// GA: v1.11
//
// Postpone deletion of a PV or a PVC when they are being used
StorageObjectInUseProtection utilfeature.Feature = "StorageObjectInUseProtection"
// owner: @aveshagarwal
// alpha: v1.9
//
// Enable resource limits priority function
ResourceLimitsPriorityFunction utilfeature.Feature = "ResourceLimitsPriorityFunction"
// owner: @m1093782566
// GA: v1.11
//
// Implement IPVS-based in-cluster service load balancing
SupportIPVSProxyMode utilfeature.Feature = "SupportIPVSProxyMode"
// owner: @dims
// alpha: v1.10
//
// Implement support for limiting pids in pods
SupportPodPidsLimit utilfeature.Feature = "SupportPodPidsLimit"
// owner: @feiskyer
// alpha: v1.10
//
// Enable Hyper-V containers on Windows
HyperVContainer utilfeature.Feature = "HyperVContainer"
// owner: @k82cn
// beta: v1.12
//
// Schedule DaemonSet Pods by default scheduler instead of DaemonSet controller
ScheduleDaemonSetPods utilfeature.Feature = "ScheduleDaemonSetPods"
// owner: @mikedanese
// beta: v1.12
//
// Implement TokenRequest endpoint on service account resources.
TokenRequest utilfeature.Feature = "TokenRequest"
// owner: @mikedanese
// beta: v1.12
//
// Enable ServiceAccountTokenVolumeProjection support in ProjectedVolumes.
TokenRequestProjection utilfeature.Feature = "TokenRequestProjection"
// owner: @mikedanese
// alpha: v1.13
//
// Migrate ServiceAccount volumes to use a projected volume consisting of a
// ServiceAccountTokenVolumeProjection. This feature adds new required flags
// to the API server.
BoundServiceAccountTokenVolume utilfeature.Feature = "BoundServiceAccountTokenVolume"
// owner: @Random-Liu
// beta: v1.11
//
// Enable container log rotation for cri container runtime
CRIContainerLogRotation utilfeature.Feature = "CRIContainerLogRotation"
// owner: @verult
// GA: v1.13
//
// Enables the regional PD feature on GCE.
GCERegionalPersistentDisk utilfeature.Feature = "GCERegionalPersistentDisk"
// owner: @krmayankk
// alpha: v1.10
//
// Enables control over the primary group ID of containers' init processes.
RunAsGroup utilfeature.Feature = "RunAsGroup"
// owner: @saad-ali<|fim▁hole|> // Do not remove this feature gate even though it's GA
VolumeSubpath utilfeature.Feature = "VolumeSubpath"
// owner: @gnufied
// beta : v1.12
//
// Add support for volume plugins to report node specific
// volume limits
AttachVolumeLimit utilfeature.Feature = "AttachVolumeLimit"
// owner: @ravig
// alpha: v1.11
//
// Include volume count on node to be considered for balanced resource allocation while scheduling.
// A node which has closer cpu,memory utilization and volume count is favoured by scheduler
// while making decisions.
BalanceAttachedNodeVolumes utilfeature.Feature = "BalanceAttachedNodeVolumes"
// owner: @freehan
// beta: v1.11
//
// Support Pod Ready++
PodReadinessGates utilfeature.Feature = "PodReadinessGates"
// owner: @kevtaylor
// alpha: v1.11
//
// Allow subpath environment variable substitution
// Only applicable if the VolumeSubpath feature is also enabled
VolumeSubpathEnvExpansion utilfeature.Feature = "VolumeSubpathEnvExpansion"
// owner: @vikaschoudhary16
// GA: v1.13
//
// Enable probe based plugin watcher utility for discovering Kubelet plugins
KubeletPluginsWatcher utilfeature.Feature = "KubeletPluginsWatcher"
// owner: @vikaschoudhary16
// beta: v1.12
//
// Enable resource quota scope selectors
ResourceQuotaScopeSelectors utilfeature.Feature = "ResourceQuotaScopeSelectors"
// owner: @vladimirvivien
// alpha: v1.11
//
// Enables CSI to use raw block storage volumes
CSIBlockVolume utilfeature.Feature = "CSIBlockVolume"
// owner: @tallclair
// alpha: v1.12
//
// Enables RuntimeClass, for selecting between multiple runtimes to run a pod.
RuntimeClass utilfeature.Feature = "RuntimeClass"
// owner: @mtaufen
// alpha: v1.12
//
// Kubelet uses the new Lease API to report node heartbeats,
// (Kube) Node Lifecycle Controller uses these heartbeats as a node health signal.
NodeLease utilfeature.Feature = "NodeLease"
// owner: @janosi
// alpha: v1.12
//
// Enables SCTP as new protocol for Service ports, NetworkPolicy, and ContainerPort in Pod/Containers definition
SCTPSupport utilfeature.Feature = "SCTPSupport"
// owner: @xing-yang
// alpha: v1.12
//
// Enable volume snapshot data source support.
VolumeSnapshotDataSource utilfeature.Feature = "VolumeSnapshotDataSource"
// owner: @jessfraz
// alpha: v1.12
//
// Enables control over ProcMountType for containers.
ProcMountType utilfeature.Feature = "ProcMountType"
// owner: @janetkuo
// alpha: v1.12
//
// Allow TTL controller to clean up Pods and Jobs after they finish.
TTLAfterFinished utilfeature.Feature = "TTLAfterFinished"
// owner: @dashpole
// alpha: v1.13
//
// Enables the kubelet's pod resources grpc endpoint
KubeletPodResources utilfeature.Feature = "KubeletPodResources"
)
func init() {
utilfeature.DefaultMutableFeatureGate.Add(defaultKubernetesFeatureGates)
}
// defaultKubernetesFeatureGates consists of all known Kubernetes-specific feature keys.
// To add a new feature, define a key for it above and add it here. The features will be
// available throughout Kubernetes binaries.
var defaultKubernetesFeatureGates = map[utilfeature.Feature]utilfeature.FeatureSpec{
AppArmor: {Default: true, PreRelease: utilfeature.Beta},
DynamicKubeletConfig: {Default: true, PreRelease: utilfeature.Beta},
ExperimentalHostUserNamespaceDefaultingGate: {Default: false, PreRelease: utilfeature.Beta},
ExperimentalCriticalPodAnnotation: {Default: false, PreRelease: utilfeature.Alpha},
DevicePlugins: {Default: true, PreRelease: utilfeature.Beta},
TaintBasedEvictions: {Default: true, PreRelease: utilfeature.Beta},
RotateKubeletServerCertificate: {Default: true, PreRelease: utilfeature.Beta},
RotateKubeletClientCertificate: {Default: true, PreRelease: utilfeature.Beta},
PersistentLocalVolumes: {Default: true, PreRelease: utilfeature.Beta},
LocalStorageCapacityIsolation: {Default: true, PreRelease: utilfeature.Beta},
HugePages: {Default: true, PreRelease: utilfeature.Beta},
Sysctls: {Default: true, PreRelease: utilfeature.Beta},
DebugContainers: {Default: false, PreRelease: utilfeature.Alpha},
PodShareProcessNamespace: {Default: true, PreRelease: utilfeature.Beta},
PodPriority: {Default: true, PreRelease: utilfeature.Beta},
TaintNodesByCondition: {Default: true, PreRelease: utilfeature.Beta},
MountPropagation: {Default: true, PreRelease: utilfeature.GA},
QOSReserved: {Default: false, PreRelease: utilfeature.Alpha},
ExpandPersistentVolumes: {Default: true, PreRelease: utilfeature.Beta},
ExpandInUsePersistentVolumes: {Default: false, PreRelease: utilfeature.Alpha},
AttachVolumeLimit: {Default: true, PreRelease: utilfeature.Beta},
CPUManager: {Default: true, PreRelease: utilfeature.Beta},
CPUCFSQuotaPeriod: {Default: false, PreRelease: utilfeature.Alpha},
ServiceNodeExclusion: {Default: false, PreRelease: utilfeature.Alpha},
MountContainers: {Default: false, PreRelease: utilfeature.Alpha},
VolumeScheduling: {Default: true, PreRelease: utilfeature.GA},
CSIPersistentVolume: {Default: true, PreRelease: utilfeature.GA},
CSIDriverRegistry: {Default: false, PreRelease: utilfeature.Alpha},
CSINodeInfo: {Default: false, PreRelease: utilfeature.Alpha},
CustomPodDNS: {Default: true, PreRelease: utilfeature.Beta},
BlockVolume: {Default: true, PreRelease: utilfeature.Beta},
StorageObjectInUseProtection: {Default: true, PreRelease: utilfeature.GA},
ResourceLimitsPriorityFunction: {Default: false, PreRelease: utilfeature.Alpha},
SupportIPVSProxyMode: {Default: true, PreRelease: utilfeature.GA},
SupportPodPidsLimit: {Default: false, PreRelease: utilfeature.Alpha},
HyperVContainer: {Default: false, PreRelease: utilfeature.Alpha},
ScheduleDaemonSetPods: {Default: true, PreRelease: utilfeature.Beta},
TokenRequest: {Default: true, PreRelease: utilfeature.Beta},
TokenRequestProjection: {Default: true, PreRelease: utilfeature.Beta},
BoundServiceAccountTokenVolume: {Default: false, PreRelease: utilfeature.Alpha},
CRIContainerLogRotation: {Default: true, PreRelease: utilfeature.Beta},
GCERegionalPersistentDisk: {Default: true, PreRelease: utilfeature.GA},
RunAsGroup: {Default: false, PreRelease: utilfeature.Alpha},
VolumeSubpath: {Default: true, PreRelease: utilfeature.GA},
BalanceAttachedNodeVolumes: {Default: false, PreRelease: utilfeature.Alpha},
PodReadinessGates: {Default: true, PreRelease: utilfeature.Beta},
VolumeSubpathEnvExpansion: {Default: false, PreRelease: utilfeature.Alpha},
KubeletPluginsWatcher: {Default: true, PreRelease: utilfeature.GA},
ResourceQuotaScopeSelectors: {Default: true, PreRelease: utilfeature.Beta},
CSIBlockVolume: {Default: false, PreRelease: utilfeature.Alpha},
RuntimeClass: {Default: false, PreRelease: utilfeature.Alpha},
NodeLease: {Default: true, PreRelease: utilfeature.Beta},
SCTPSupport: {Default: false, PreRelease: utilfeature.Alpha},
VolumeSnapshotDataSource: {Default: false, PreRelease: utilfeature.Alpha},
ProcMountType: {Default: false, PreRelease: utilfeature.Alpha},
TTLAfterFinished: {Default: false, PreRelease: utilfeature.Alpha},
KubeletPodResources: {Default: false, PreRelease: utilfeature.Alpha},
// inherited features from generic apiserver, relisted here to get a conflict if it is changed
// unintentionally on either side:
genericfeatures.StreamingProxyRedirects: {Default: true, PreRelease: utilfeature.Beta},
genericfeatures.AdvancedAuditing: {Default: true, PreRelease: utilfeature.GA},
genericfeatures.DynamicAuditing: {Default: false, PreRelease: utilfeature.Alpha},
genericfeatures.APIResponseCompression: {Default: false, PreRelease: utilfeature.Alpha},
genericfeatures.Initializers: {Default: false, PreRelease: utilfeature.Alpha},
genericfeatures.APIListChunking: {Default: true, PreRelease: utilfeature.Beta},
genericfeatures.DryRun: {Default: true, PreRelease: utilfeature.Beta},
// inherited features from apiextensions-apiserver, relisted here to get a conflict if it is changed
// unintentionally on either side:
apiextensionsfeatures.CustomResourceValidation: {Default: true, PreRelease: utilfeature.Beta},
apiextensionsfeatures.CustomResourceSubresources: {Default: true, PreRelease: utilfeature.Beta},
apiextensionsfeatures.CustomResourceWebhookConversion: {Default: false, PreRelease: utilfeature.Alpha},
// features that enable backwards compatibility but are scheduled to be removed
// ...
}<|fim▁end|> | // ga
//
// Allow mounting a subpath of a volume in a container |
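A minimal caller-side sketch of how one of these gates is typically consumed (an assumption based on the `DefaultMutableFeatureGate.Add` registration above; the read-only `utilfeature.DefaultFeatureGate` accessor and the guarded code path are illustrative):

	// Guard a new code path on its feature gate instead of reading the map directly.
	if utilfeature.DefaultFeatureGate.Enabled(PodPriority) {
		// take the priority-aware code path
	}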
<|file_name|>read_dump.py<|end_file_name|><|fim▁begin|>import os
import sys
import tnetstring
def read_packets (filename):
try:
os.stat (filename)
except OSError:
print "No such file : %s"%filename
sys.exit (1)
pkts = open (filename).read ()
pkts = tnetstring.loads (pkts, 'iso-8859-15')
for data in pkts:
yield data
if '__main__' == __name__:
if not sys.argv [1:]:
print "Usage: %s 'file'"%sys.argv [0]
sys.exit (0)
filename = sys.argv [1]<|fim▁hole|> for pkt in read_packets (filename):
print "found %d's len packet"%len (pkt)<|fim▁end|> | |
<|file_name|>compat.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>import types
try:
from collections import OrderedDict
except ImportError: # pragma: nocover
# Django < 1.5 fallback
from django.utils.datastructures import SortedDict as OrderedDict # noqa
# There is a bug with deepcopy of bound methods in Python 2.6; patch if we are running Python < 2.7
# http://bugs.python.org/issue1515
if version_info < (2, 7, 0):
def _deepcopy_method(x, memo):
return type(x)(x.im_func, copy.deepcopy(x.im_self, memo), x.im_class)
copy._deepcopy_dispatch[types.MethodType] = _deepcopy_method<|fim▁end|> | from sys import version_info
import copy
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate log;
extern crate env_logger;
extern crate clap;
extern crate serde;
extern crate serde_json;
extern crate time;
<|fim▁hole|>extern crate serde_derive;
pub mod manifest;
pub mod options;
pub mod profiler;
pub mod processor;
pub mod types;<|fim▁end|> | #[macro_use] |
<|file_name|>container_analysis_client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.devtools.containeranalysis.v1 ContainerAnalysis API."""
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.path_template
import grpc
from google.cloud.devtools.containeranalysis_v1.gapic import (
container_analysis_client_config,
)
from google.cloud.devtools.containeranalysis_v1.gapic.transports import (
container_analysis_grpc_transport,
)
from google.cloud.devtools.containeranalysis_v1.proto import containeranalysis_pb2_grpc
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import options_pb2
from google.iam.v1 import policy_pb2
from grafeas import grafeas_v1
from grafeas.grafeas_v1.gapic.transports import grafeas_grpc_transport
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
"google-cloud-containeranalysis"
).version
class ContainerAnalysisClient(object):
"""
Retrieves analysis results of Cloud components such as Docker container
images. The Container Analysis API is an implementation of the
`Grafeas <https://grafeas.io>`__ API.
Analysis results are stored as a series of occurrences. An
``Occurrence`` contains information about a specific analysis instance
on a resource. An occurrence refers to a ``Note``. A note contains
details describing the analysis and is generally stored in a separate
project, called a ``Provider``. Multiple occurrences can refer to the
same note.
For example, an SSL vulnerability could affect multiple images. In this
case, there would be one note for the vulnerability and an occurrence
for each image with the vulnerability referring to that note.
"""
SERVICE_ADDRESS = "containeranalysis.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.devtools.containeranalysis.v1.ContainerAnalysis"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ContainerAnalysisClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
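    # Hypothetical usage sketch (the key-file path is illustrative):
    #
    #   client = ContainerAnalysisClient.from_service_account_file(
    #       "/path/to/service-account.json")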
@classmethod
def note_path(cls, project, note):
"""Return a fully-qualified note string."""
return google.api_core.path_template.expand(
"projects/{project}/notes/{note}", project=project, note=note
)
@classmethod
def occurrence_path(cls, project, occurrence):
"""Return a fully-qualified occurrence string."""
return google.api_core.path_template.expand(
"projects/{project}/occurrences/{occurrence}",
project=project,
occurrence=occurrence,
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.ContainerAnalysisGrpcTransport,
Callable[[~.Credentials, type], ~.ContainerAnalysisGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):<|fim▁hole|> # Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = container_analysis_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=container_analysis_grpc_transport.ContainerAnalysisGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = container_analysis_grpc_transport.ContainerAnalysisGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
def get_grafeas_client(self):
"""Returns an equivalent grafeas client.
Returns:
A :class:`~grafeas.grafeas_v1.GrafeasClient` instance.
"""
grafeas_transport = grafeas_grpc_transport.GrafeasGrpcTransport(
self.SERVICE_ADDRESS, self.transport._OAUTH_SCOPES
)
return grafeas_v1.GrafeasClient(grafeas_transport)
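    # Hypothetical usage sketch (the project name is illustrative, and
    # list_occurrences is assumed from the grafeas_v1 client surface):
    #
    #   client = ContainerAnalysisClient()
    #   grafeas = client.get_grafeas_client()
    #   for occurrence in grafeas.list_occurrences("projects/my-project"):
    #       print(occurrence.name)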
# Service calls
def set_iam_policy(
self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Sets the access control policy on the specified note or occurrence.
Requires ``containeranalysis.notes.setIamPolicy`` or
``containeranalysis.occurrences.setIamPolicy`` permission if the
resource is a note or an occurrence, respectively.
The resource takes the format ``projects/[PROJECT_ID]/notes/[NOTE_ID]``
for notes and ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`` for
occurrences.
Example:
>>> from google.cloud.devtools import containeranalysis_v1
>>>
>>> client = containeranalysis_v1.ContainerAnalysisClient()
>>>
>>> resource = client.note_path('[PROJECT]', '[NOTE]')
>>>
>>> # TODO: Initialize `policy`:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
See the operation documentation for the appropriate value for this field.
policy (Union[dict, ~google.cloud.devtools.containeranalysis_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The
size of the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.devtools.containeranalysis_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.devtools.containeranalysis_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "set_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"set_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs["SetIamPolicy"].retry,
default_timeout=self._method_configs["SetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["set_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def get_iam_policy(
self,
resource,
options_=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets the access control policy for a note or an occurrence resource.
Requires ``containeranalysis.notes.setIamPolicy`` or
``containeranalysis.occurrences.setIamPolicy`` permission if the
resource is a note or occurrence, respectively.
The resource takes the format ``projects/[PROJECT_ID]/notes/[NOTE_ID]``
for notes and ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`` for
occurrences.
Example:
>>> from google.cloud.devtools import containeranalysis_v1
>>>
>>> client = containeranalysis_v1.ContainerAnalysisClient()
>>>
>>> resource = client.note_path('[PROJECT]', '[NOTE]')
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
See the operation documentation for the appropriate value for this field.
options_ (Union[dict, ~google.cloud.devtools.containeranalysis_v1.types.GetPolicyOptions]): OPTIONAL: A ``GetPolicyOptions`` object for specifying options to
``GetIamPolicy``. This field is only used by Cloud IAM.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.devtools.containeranalysis_v1.types.GetPolicyOptions`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.devtools.containeranalysis_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "get_iam_policy" not in self._inner_api_calls:
self._inner_api_calls[
"get_iam_policy"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs["GetIamPolicy"].retry,
default_timeout=self._method_configs["GetIamPolicy"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(
resource=resource, options=options_
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_iam_policy"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def test_iam_permissions(
self,
resource,
permissions,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Returns the permissions that a caller has on the specified note or
occurrence. Requires list permission on the project (for example,
``containeranalysis.notes.list``).
The resource takes the format ``projects/[PROJECT_ID]/notes/[NOTE_ID]``
for notes and ``projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`` for
occurrences.
Example:
>>> from google.cloud.devtools import containeranalysis_v1
>>>
>>> client = containeranalysis_v1.ContainerAnalysisClient()
>>>
>>> resource = client.note_path('[PROJECT]', '[NOTE]')
>>>
>>> # TODO: Initialize `permissions`:
>>> permissions = []
>>>
>>> response = client.test_iam_permissions(resource, permissions)
Args:
resource (str): REQUIRED: The resource for which the policy detail is being requested.
See the operation documentation for the appropriate value for this field.
permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see `IAM
Overview <https://cloud.google.com/iam/docs/overview#permissions>`__.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.devtools.containeranalysis_v1.types.TestIamPermissionsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if "test_iam_permissions" not in self._inner_api_calls:
self._inner_api_calls[
"test_iam_permissions"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.test_iam_permissions,
default_retry=self._method_configs["TestIamPermissions"].retry,
default_timeout=self._method_configs["TestIamPermissions"].timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource, permissions=permissions
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("resource", resource)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["test_iam_permissions"](
request, retry=retry, timeout=timeout, metadata=metadata
)<|fim▁end|> | Client options used to set user options on the client. API Endpoint
should be set through client_options.
""" |
<|file_name|>example.py<|end_file_name|><|fim▁begin|># Copyright (c) Microsoft Corporation 2015
from z3 import *
<|fim▁hole|>y = Real('y')
s = Solver()
s.add(x + y > 5, x > 1, y > 1)
print(s.check())
print(s.model())<|fim▁end|> | x = Real('x')
|
<|file_name|>pages.go<|end_file_name|><|fim▁begin|>package icarus
import (
"encoding/json"
"fmt"
"time"
)
type NoSuchPagesError struct {
msg string
Slugs []string
}
func (e *NoSuchPagesError) Error() string {
return e.msg
}
// Retrieve a list of slugs from Redis.
func PagesFromRedis(slugs []string) ([]*Page, error) {
pages := make([]*Page, 0, len(slugs))
keys := make([]string, 0, len(slugs))
for _, slug := range slugs {
p := &Page{Slug: slug}
pages = append(pages, p)
keys = append(keys, p.Key())
}
rc, err := GetRedisClient()
defer PutRedisClient(rc)
if err != nil {
return pages, fmt.Errorf("failed to connect to redis: %v", err)
}
if len(keys) == 0 {
return []*Page{}, nil
}
raws, err := rc.Cmd("MGET", keys).List()
nonEmpty := 0
for _, raw := range raws {
if raw != "" {
nonEmpty += 1
}
}
if err != nil || nonEmpty == 0 {
msg := fmt.Sprintf("failed retrieving slugs %v from redis: %v", slugs, err)
return pages, &NoSuchPagesError{msg, slugs}
}
for i, raw := range raws {
if err := json.Unmarshal([]byte(raw), pages[i]); err != nil {
return pages, err
}
}
return pages, nil
}
// Retrieve one page from Redis.
func PageFromRedis(slug string) (*Page, error) {
pages, err := PagesFromRedis([]string{slug})
if err != nil {
return nil, err
}
if len(pages) != 1 {
return nil, fmt.Errorf("retrieve none-one number of values for %v", slug)
}
return pages[0], nil
}
type Page struct {
Slug string `json:"slug"`
Tags []string `json:"tags"`
Title string `json:"title"`
Summary string `json:"summary"`
Content string `json:"html"`
Draft bool `json:"draft"`
PubDateStr int64 `json:"pub_date"`
EditDateStr int64 `json:"edit_date"`
}
// Generate the Redis key for this page.
func (p *Page) Key() string {
return "page." + p.Slug
}
// Synchronize this page to Redis.
func (p *Page) Sync() error {
rc, err := GetRedisClient()
defer PutRedisClient(rc)
<|fim▁hole|> }
asJSON, err := json.Marshal(p)
if err != nil {
return fmt.Errorf("failed marshalling page %v: %v", p.Slug, err)
}
_, err = rc.Cmd("SET", p.Key(), asJSON).Str()
if err != nil {
return err
}
if !p.Draft {
err := RegisterPage(p)
if err != nil {
return err
}
err = IndexPage(p)
if err != nil {
return err
}
} else {
err := UnregisterPage(p)
if err != nil {
return err
}
err = UnindexPage(p)
if err != nil {
return err
}
}
return nil
}
func (p *Page) getDate(date int64) time.Time {
t := time.Unix(date, 0)
return t
}
func (p *Page) PubDate() time.Time {
return p.getDate(p.PubDateStr)
}
func (p *Page) EditDate() time.Time {
return p.getDate(p.EditDateStr)
}
func (p *Page) InitPubDate() {
p.PubDateStr = CurrentTimestamp()
}
func (p *Page) InitEditDate() {
p.EditDateStr = CurrentTimestamp()
}
func (p *Page) EnsurePubDate() {
if p.PubDateStr == 0 {
p.InitPubDate()
}
}
func (p *Page) EnsureEditDate() {
if p.EditDateStr == 0 {
p.InitEditDate()
}
}<|fim▁end|> | if err != nil {
return fmt.Errorf("failed connecting to redis: %v", err) |
<|file_name|>SaveHttpEntityAdapter.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2016 CaMnter [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software<|fim▁hole|> * limitations under the License.
*/
package com.camnter.savevolley.network.adapter.core;
import com.camnter.savevolley.network.core.http.SaveHttpEntity;
/**
* Description:SaveHttpEntityAdapter
* Created by:CaMnter
* Time:2016-05-27 14:10
*/
public interface SaveHttpEntityAdapter<T> {
SaveHttpEntity adaptiveEntity(T t);
}<|fim▁end|> | * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and |
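A minimal sketch of a conforming implementation (the String source type, and the way SaveHttpEntity is populated, are illustrative assumptions rather than part of the library):

	public class StringEntityAdapter implements SaveHttpEntityAdapter<String> {
	    @Override
	    public SaveHttpEntity adaptiveEntity(String source) {
	        // Assumes a no-arg SaveHttpEntity constructor; populate it from source here.
	        SaveHttpEntity entity = new SaveHttpEntity();
	        return entity;
	    }
	}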
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>###############################################################
# Copyright 2020 Lawrence Livermore National Security, LLC
# (c.f. AUTHORS, NOTICE.LLNS, COPYING)
#<|fim▁hole|># SPDX-License-Identifier: LGPL-3.0
###############################################################
from flux.resource.Rlist import Rlist
from flux.resource.ResourceSet import ResourceSet<|fim▁end|> | # This file is part of the Flux resource manager framework.
# For details, see https://github.com/flux-framework.
# |
<|file_name|>main.py<|end_file_name|><|fim▁begin|>"""
MIT License
Copyright (c) 2017 Hajime Nakagami<[email protected]>
Copyright (c) 2019 Claude SIMON (https://q37.info/s/rmnmqd49)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# This is the adaptation of the program found on:
# https://gist.github.com/nakagami/7a7d799bd4bd4ad8fcea96135c4af179
import os, sys, random, itertools, time
os.chdir(os.path.dirname(os.path.realpath(__file__)))
sys.path.append("../../atlastk")
import atlastk
EMPTY = 0
BLACK = -1
WHITE = 1
# http://uguisu.skr.jp/othello/5-1.html
WEIGHT_MATRIX = [
[120, -20, 20, 5, 5, 20, -20, 120],
[-20, -40, -5, -5, -5, -5, -40, -20],
[20, -5, 15, 3, 3, 15, -5, 20],
[5, -5, 3, 3, 3, 3, -5, 5],
[5, -5, 3, 3, 3, 3, -5, 5],
[20, -5, 15, 3, 3, 15, -5, 20],
[-20, -40, -5, -5, -5, -5, -40, -20],
[120, -20, 20, 5, 5, 20, -20, 120],
]
class Reversi:
def reset(self):
self.board = []
for _ in range(8):
self.board.append([EMPTY] * 8)
self.board[3][3] = self.board[4][4] = BLACK
self.board[4][3] = self.board[3][4] = WHITE
def __init__(self, orig=None):
self.reset()
# copy constructor
if orig:
assert isinstance(orig, Reversi)
for i in range(8):
for j in range(8):
self.board[i][j] = orig.board[i][j]
def count(self, bwe):
"Count pieces or empty spaces in the board"
assert bwe in (BLACK, WHITE, EMPTY)
n = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bwe:
n += 1
return n
def _has_my_piece(self, bw, x, y, delta_x, delta_y):
"There is my piece in the direction of (delta_x, delta_y) from (x, y)."
assert bw in (BLACK, WHITE)
assert delta_x in (-1, 0, 1)
assert delta_y in (-1, 0, 1)
x += delta_x
y += delta_y
if x < 0 or x > 7 or y < 0 or y > 7 or self.board[x][y] == EMPTY:
return False
if self.board[x][y] == bw:
return True
return self._has_my_piece(bw, x, y, delta_x, delta_y)
def reversible_directions(self, bw, x, y):
"Can put piece on (x, y) ? Return list of reversible direction tuple"
assert bw in (BLACK, WHITE)
directions = []
if self.board[x][y] != EMPTY:
return directions
for d in itertools.product([-1, 1, 0], [-1, 1, 0]):
if d == (0, 0):
continue
nx = x + d[0]
ny = y + d[1]
if nx < 0 or nx > 7 or ny < 0 or ny > 7 or self.board[nx][ny] != bw * -1:
continue
if self._has_my_piece(bw, nx, ny, d[0], d[1]):
directions.append(d)
return directions
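    # Worked example on the opening position with BLACK to move: the four
    # legal squares are (2, 4), (3, 5), (4, 2) and (5, 3). For (4, 2) the
    # single reversible direction is (0, 1): it walks over the WHITE piece
    # at (4, 3) and stops at the BLACK anchor at (4, 4).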
def _reverse_piece(self, bw, x, y, delta_x, delta_y):
"Reverse pieces in the direction of (delta_x, delta_y) from (x, y) untill bw."
assert bw in (BLACK, WHITE)
x += delta_x
y += delta_y
assert self.board[x][y] in (BLACK, WHITE)
if self.board[x][y] == bw:
return
self.board[x][y] = bw
return self._reverse_piece(bw, x, y, delta_x, delta_y)
def isAllowed(self, x, y, bw):
return len(self.reversible_directions(bw, x, y)) != 0
def put(self, x, y, bw):
"""
True: Put bw's piece on (x, y) and change board status.
False: Can't put bw's piece on (x, y)
"""
assert bw in (BLACK, WHITE)
directions = self.reversible_directions(bw, x, y)
if len(directions) == 0:
return False
self.board[x][y] = bw
for delta in directions:
self._reverse_piece(bw, x, y, delta[0], delta[1])
return True
def _calc_score(self, bw, weight_matrix):
assert bw in (BLACK, WHITE)
my_score = 0
against_score = 0
for i in range(8):
for j in range(8):
if self.board[i][j] == bw:
my_score += weight_matrix[i][j]
elif self.board[i][j] == bw * -1:
against_score += weight_matrix[i][j]
return my_score - against_score
def find_best_position(self, bw, weight_matrix):
"Return the best next position."
assert bw in (BLACK, WHITE)
next_positions = {}
for i in range(8):
for j in range(8):
reversi = Reversi(self)
if reversi.put(i, j, bw):
next_positions.setdefault(
reversi._calc_score(bw, weight_matrix), []
).append((i, j))
if next_positions:
next_position = random.choice(next_positions[max(next_positions)])
else:
next_position = None
return next_position
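    # Hypothetical usage sketch of the greedy one-ply search above:
    #
    #   game = Reversi()
    #   xy = game.find_best_position(WHITE, WEIGHT_MATRIX)
    #   if xy:
    #       game.put(xy[0], xy[1], WHITE)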
# -------------------------------------------------------------------------------
def drawBoard(reversi, dom, prefetch=False):
board = atlastk.createHTML("tbody")
for y, row in enumerate(reversi.board):
board.push_tag("tr")
for x, r in enumerate(row):
board.push_tag("td")
board.put_attribute("id", str(x) + str(y))
if (r == EMPTY) and (reversi.isAllowed(y, x, reversi.player)):
board.put_attribute("xdh:onevent", "Play")
if prefetch:
r = reversi.player
board.put_attribute(
"style", "opacity: 0.1; background-color: white;")
board.put_attribute(
"class", {EMPTY: 'none', BLACK: 'black', WHITE: 'white'}[r])
board.pop_tag()
board.pop_tag()
dom.inner("board", board)
dom.set_values({
"black": reversi.count(BLACK),
"white": reversi.count(WHITE)
})
def acConnect(reversi, dom):
reversi.player = BLACK
reversi.weight_matrix = WEIGHT_MATRIX
dom.inner("", open("Main.html").read())
drawBoard(reversi, dom)
dom.alert("Welcome to this Reversi (aka Othello) game made with the Atlas toolkit.\n\nYou play against the computer with the black pieces.")
def acPlay(reversi, dom, id):
xy = [int(id[1]), int(id[0])]
player = reversi.player
weight_matrix = reversi.weight_matrix
<|fim▁hole|> if (reversi.put(xy[0], xy[1], player)):
drawBoard(reversi, dom, False)
xy = reversi.find_best_position(player * -1, weight_matrix)
if xy:
reversi.put(xy[0], xy[1], player * -1)
time.sleep(1)
drawBoard(reversi, dom)
if (reversi.count(EMPTY) == 0 or
reversi.count(BLACK) == 0 or
reversi.count(WHITE) == 0):
if reversi.count(player) > reversi.count(player * -1):
dom.alert('You win!')
elif reversi.count(player) < reversi.count(player * -1):
dom.alert('You lose!')
else:
dom.alert('Egality!')
def acNew(reversi, dom):
reversi.reset()
drawBoard(reversi, dom)
callbacks = {
"": acConnect,
"Play": acPlay,
"New": acNew
}
atlastk.launch(callbacks, Reversi, open("Head.html").read())<|fim▁end|> | |
<|file_name|>context.js<|end_file_name|><|fim▁begin|>var url = require('url')
, path = require('path')
, fs = require('fs')
, utils = require('./utils')
, EventEmitter = require('events').EventEmitter
exports = module.exports = Context
function Context(app, req, res) {
var self = this
this.app = app
this.req = req
this.res = res
this.done = this.done.bind(this)
EventEmitter.call(this)
var socket = res.socket
res.on('finish', done)
socket.on('error', done)
socket.on('close', done)
function done(err) {
res.removeListener('finish', done)
socket.removeListener('error', done)
socket.removeListener('close', done)
self.done(err)
}
}
Context.prototype = {
done: function(err) {
if (this._notifiedDone === true) return
if (err) {
if (this.writable) {
this.resHeaders = {}
this.type = 'text/plain'
this.status = err.code === 'ENOENT' ? 404 : (err.status || 500)
this.length = Buffer.byteLength(err.message)
this.res.end(err.message)
}
this.app.emit('error', err)
}
this._notifiedDone = true
this.emit('done', err)
},
throw: function(status, err) {
status = status || 500
err = err || {}
err.status = status
err.message = err.message || status.toString()<|fim▁hole|> var app = this.app
, viewPath = path.join(app.viewRoot, view)
, ext = path.extname(viewPath)
, exts, engine, content, testPath, i, j
if (!ext || (yield utils.fileExists(viewPath))) {
for (i = 0; app.viewEngines[i]; i++) {
exts = (app.viewEngines[i].exts || ['.' + app.viewEngines[i].name.toLowerCase()])
if (ext) {
if (~exts.indexOf(ext)) {
engine = app.viewEngines[i]
break
}
continue
}
for (j = 0; exts[j]; j++) {
testPath = viewPath + exts[j]
if (yield utils.fileExists(testPath)) {
viewPath = testPath
engine = app.viewEngines[i]
break
}
}
}
}
if (!engine) return this.throw(500, new Error('View does not exist'))
return yield engine.render(viewPath, locals)
},
/*
* opts: { path: ..., domain: ..., expires: ..., maxAge: ..., httpOnly: ..., secure: ..., sign: ... }
*/
cookie: function(name, val, opts) {
if (!opts) opts = {}
if (typeof val == 'object') val = JSON.stringify(val)
if (this.secret && opts.sign) {
val = this.app.cookies.prefix + this.app.cookies.sign(val, this.secret)
}
var headerVal = name + '=' + val + '; Path=' + (opts.path || '/')
if (opts.domain) headerVal += '; Domain=' + opts.domain
if (opts.expires) {
if (typeof opts.expires === 'number') opts.expires = new Date(opts.expires)
if (opts.expires instanceof Date) opts.expires = opts.expires.toUTCString()
headerVal += '; Expires=' + opts.expires
}
if (opts.maxAge) headerVal += '; Max-Age=' + opts.maxAge
if (opts.httpOnly) headerVal += '; HttpOnly'
if (opts.secure) headerVal += '; Secure'
this.setResHeader('Set-Cookie', headerVal)
},
get writable() {
var socket = this.res.socket
return socket && socket.writable && !this.res.headersSent
},
get path() {
return url.parse(this.url).pathname
},
set path(val) {
var obj = url.parse(this.url)
obj.pathname = val
this.url = url.format(obj)
},
get status() {
return this._status
},
set status(code) {
this._status = this.res.statusCode = code
},
get type() {
return this.getResHeader('Content-Type')
},
set type(val) {
if (val == null) return this.removeResHeader('Content-Type')
this.setResHeader('Content-Type', val)
},
get length() {
return this.getResHeader('Content-Length')
},
set length(val) {
if (val == null) return this.removeResHeader('Content-Length')
this.setResHeader('Content-Length', val)
},
get body() {
return this._body
},
set body(val) {
this._body = val
}
}
utils.extend(Context.prototype, EventEmitter.prototype)
utils.proxy(Context.prototype, {
req: {
method : 'access',
url : 'access',
secure : 'getter',
headers : ['getter', 'reqHeaders'],
},
res: {
_headers : ['access', 'resHeaders'],
getHeader : ['invoke', 'getResHeader'],
setHeader : ['invoke', 'setResHeader'],
removeHeader : ['invoke', 'removeResHeader']
}
})<|fim▁end|> | this.done(err)
},
render: function *(view, locals) { |
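A minimal usage sketch for the cookie helper above (hedged: `ctx` stands for a live Context instance and the names and values are illustrative; signing additionally requires `ctx.secret` to be set):

	// Sets a signed, short-lived, HTTP-only session cookie; the object value is JSON-serialized.
	ctx.cookie('session', { uid: 42 }, { maxAge: 3600, httpOnly: true, sign: true })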
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>"""Common utility for testing third party oauth2 features."""
import json
from base64 import b64encode
import httpretty
from onelogin.saml2.utils import OneLogin_Saml2_Utils
from oauth2_provider.models import Application
from social_core.backends.facebook import API_VERSION as FACEBOOK_API_VERSION
from social_core.backends.facebook import FacebookOAuth2
from social_django.models import Partial, UserSocialAuth
from student.tests.factories import UserFactory
from .testutil import ThirdPartyAuthTestMixin
@httpretty.activate
class ThirdPartyOAuthTestMixin(ThirdPartyAuthTestMixin):
"""
Mixin with tests for third party oauth views. A TestCase that includes<|fim▁hole|>
BACKEND: The name of the backend from python-social-auth
USER_URL: The URL of the endpoint that the backend retrieves user data from
UID_FIELD: The field in the user data that the backend uses as the user id
"""
social_uid = "test_social_uid"
access_token = "test_access_token"
client_id = "test_client_id"
CREATE_USER = True
def setUp(self):
super(ThirdPartyOAuthTestMixin, self).setUp()
if self.CREATE_USER:
self.user = UserFactory()
UserSocialAuth.objects.create(user=self.user, provider=self.BACKEND, uid=self.social_uid)
self.oauth_client = self._create_client()
if self.BACKEND == 'google-oauth2':
self.configure_google_provider(enabled=True, visible=True)
elif self.BACKEND == 'facebook':
self.configure_facebook_provider(enabled=True, visible=True)
def tearDown(self):
super(ThirdPartyOAuthTestMixin, self).tearDown()
Partial.objects.all().delete()
def _create_client(self):
"""
Create an OAuth2 client application
"""
return Application.objects.create(
client_id=self.client_id,
client_type=Application.CLIENT_PUBLIC,
)
def _setup_provider_response(self, success=False, email=''):
"""
Register a mock response for the third party user information endpoint;
success indicates whether the response status code should be 200 or 400
"""
if success:
status = 200
response = {self.UID_FIELD: self.social_uid}
if email:
response.update({'email': email})
body = json.dumps(response)
else:
status = 400
body = json.dumps({})
self._setup_provider_response_with_body(status, body)
def _setup_provider_response_with_body(self, status, body):
"""
Register a mock response for the third party user information endpoint with given status and body.
"""
httpretty.register_uri(
httpretty.GET,
self.USER_URL,
body=body,
status=status,
content_type="application/json",
)
class ThirdPartyOAuthTestMixinFacebook(object):
"""Tests oauth with the Facebook backend"""
BACKEND = "facebook"
USER_URL = FacebookOAuth2.USER_DATA_URL.format(version=FACEBOOK_API_VERSION)
# In facebook responses, the "id" field is used as the user's identifier
UID_FIELD = "id"
class ThirdPartyOAuthTestMixinGoogle(object):
"""Tests oauth with the Google backend"""
BACKEND = "google-oauth2"
USER_URL = "https://www.googleapis.com/oauth2/v3/userinfo"
# In google-oauth2 responses, the "email" field is used as the user's identifier
UID_FIELD = "email"
def read_and_pre_process_xml(file_name):
"""
Read the XML file with the given name and pre-process it so that it can be parsed.
Pre-processing removes line return characters (i.e. "\n").
Arguments:
file_name (str): Name of the XML file.
Returns:
(str): Pre Processed contents of the file.
"""
with open(file_name, 'r') as xml_file:
return xml_file.read().replace('\n', '')
def prepare_saml_response_from_xml(xml, relay_state='testshib'):
"""
Pre-process XML so that it can be used as a SAML Response coming from a SAML IdP.
This method performs the following operations on the XML, in order:
1. base64 encode XML.
2. URL encode the base64 encoded data.
Arguments:
xml (string): XML data
relay_state (string): Relay State of the SAML Response
Returns:
(str): Base64 and URL encoded XML.
"""
b64encoded_xml = b64encode(xml.encode())
return 'RelayState={relay_state}&SAMLResponse={saml_response}'.format(
relay_state=OneLogin_Saml2_Utils.escape_url(relay_state),
saml_response=OneLogin_Saml2_Utils.escape_url(b64encoded_xml)
)<|fim▁end|> | this must define the following: |
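A minimal usage sketch for the two SAML helpers above (the fixture file name is illustrative):

	# Turn a canned IdP response into POST-able form data for a test client.
	xml = read_and_pre_process_xml('testshib_saml_response.xml')
	post_body = prepare_saml_response_from_xml(xml, relay_state='testshib')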
<|file_name|>rotation_construction.rs<|end_file_name|><|fim▁begin|>use num::{One, Zero};
use simba::scalar::{ClosedAdd, ClosedMul};
use crate::base::allocator::Allocator;
use crate::base::dimension::DimName;
use crate::base::{DefaultAllocator, MatrixN, Scalar};
use crate::geometry::Rotation;
impl<N, D: DimName> Rotation<N, D>
where
N: Scalar + Zero + One,
DefaultAllocator: Allocator<N, D, D>,
{
/// Creates the identity rotation, i.e., a square identity matrix of dimension `D`.
///
/// # Example
/// ```
/// # use nalgebra::Rotation2;
/// let rot1 = Rotation2::identity();
/// let rot2 = Rotation2::new(1.0);
///
/// assert_eq!(rot1 * rot2, rot2);
/// assert_eq!(rot2 * rot1, rot2);
/// ```
#[inline]
pub fn identity() -> Rotation<N, D> {
Self::from_matrix_unchecked(MatrixN::<N, D>::identity())
}
}
impl<N, D: DimName> One for Rotation<N, D><|fim▁hole|>{
#[inline]
fn one() -> Self {
Self::identity()
}
}<|fim▁end|> | where
N: Scalar + Zero + One + ClosedAdd + ClosedMul,
DefaultAllocator: Allocator<N, D, D>, |
<|file_name|>dependency_info_unittest.py<|end_file_name|><|fim▁begin|># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from catapult_base.dependency_manager import dependency_info
class DependencyInfoTest(unittest.TestCase):
def testInitRequiredInfo(self):
# Must have a dependency, platform and file_path.
self.assertRaises(ValueError, dependency_info.DependencyInfo,
None, None, None)
self.assertRaises(ValueError, dependency_info.DependencyInfo,
'dep', None, None)
self.assertRaises(ValueError, dependency_info.DependencyInfo,
None, 'plat', None)
self.assertRaises(ValueError, dependency_info.DependencyInfo,
None, None, 'config_file')
# Empty DependencyInfo.
empty_di = dependency_info.DependencyInfo('dep', 'plat', 'config_file')
self.assertFalse(empty_di.cs_bucket)
self.assertFalse(empty_di.cs_hash)
self.assertFalse(empty_di.download_path)
self.assertFalse(empty_di.cs_remote_path)
self.assertFalse(empty_di.local_paths)
self.assertEqual('dep', empty_di.dependency)
self.assertEqual('plat', empty_di.platform)
self.assertEqual(['config_file'], empty_di.config_files)
def testInitLocalPaths(self):
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', local_paths=['path0', 'path1'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual(['path0', 'path1'], dep_info.local_paths)
self.assertFalse(dep_info.version_in_cs)
self.assertFalse(dep_info.cs_hash)
self.assertFalse(dep_info.cs_bucket)
self.assertFalse(dep_info.cs_remote_path)<|fim▁hole|> def testInitMinimumCloudStorageInfo(self):
# Must specify cloud storage information atomically.
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_b')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_hash='cs_hash')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_remote_path='cs_remote_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', download_path='download_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
cs_remote_path='cs_remote_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
cs_remote_path='cs_remote_path', local_paths=['path'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
download_path='download_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
download_path='download_path', local_paths=['path'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket',
cs_remote_path='cs_remote_path',
download_path='download_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_hash='cs_hash',
cs_remote_path='cs_remote_path',
download_path='download_path')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
download_path='download_path', local_paths=['path'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket',
cs_remote_path='cs_remote_path',
download_path='download_path', local_paths=['path'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_hash='cs_hash',
cs_remote_path='cs_remote_path',
download_path='download_path', local_paths=['path'])
def testInitWithVersion(self):
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', version_in_cs='version_in_cs')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', version_in_cs='version_in_cs',
local_paths=['path2'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
cs_remote_path='cs_remote_path',
version_in_cs='version_in_cs', local_paths=['path2'])
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', version_in_cs='version_in_cs')
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('version_in_cs', dep_info.version_in_cs)
self.assertFalse(dep_info.local_paths)
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', version_in_cs='version_in_cs',
local_paths=['path'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('version_in_cs', dep_info.version_in_cs)
self.assertEqual(['path'], dep_info.local_paths)
def testInitWithArchivePath(self):
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', path_within_archive='path_within_archive')
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', path_within_archive='path_within_archive',
local_paths=['path2'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
cs_remote_path='cs_remote_path',
path_within_archive='path_within_archive',
local_paths=['path2'])
self.assertRaises(ValueError, dependency_info.DependencyInfo, 'dep', 'plat',
'config_file', cs_bucket='cs_bucket', cs_hash='cs_hash',
cs_remote_path='cs_remote_path', version_in_cs='version',
path_within_archive='path_within_archive',
local_paths=['path2'])
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path',
path_within_archive='path_within_archive')
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('path_within_archive', dep_info.path_within_archive)
self.assertFalse(dep_info.local_paths)
self.assertFalse(dep_info.version_in_cs)
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path',
path_within_archive='path_within_archive', local_paths=['path'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('path_within_archive', dep_info.path_within_archive)
self.assertEqual(['path'], dep_info.local_paths)
self.assertFalse(dep_info.version_in_cs)
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', version_in_cs='version_in_cs',
path_within_archive='path_within_archive', local_paths=['path'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('path_within_archive', dep_info.path_within_archive)
self.assertEqual(['path'], dep_info.local_paths)
self.assertEqual('version_in_cs', dep_info.version_in_cs)
def testInitCloudStorageInfo(self):
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path')
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertFalse(dep_info.version_in_cs)
self.assertFalse(dep_info.local_paths)
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', local_paths=['path'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertFalse(dep_info.version_in_cs)
self.assertEqual(['path'], dep_info.local_paths)
def testInitAllInfo(self):
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', local_paths=['path0', 'path1'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual(['path0', 'path1'], dep_info.local_paths)
self.assertFalse(dep_info.version_in_cs)
dep_info = dependency_info.DependencyInfo(
'dep', 'platform', 'config_file', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', version_in_cs='version_in_cs',
local_paths=['path0', 'path1'])
self.assertEqual('dep', dep_info.dependency)
self.assertEqual('platform', dep_info.platform)
self.assertEqual(['config_file'], dep_info.config_files)
self.assertEqual('cs_hash', dep_info.cs_hash)
self.assertEqual('cs_bucket', dep_info.cs_bucket)
self.assertEqual('cs_remote_path', dep_info.cs_remote_path)
self.assertEqual('download_path', dep_info.download_path)
self.assertEqual('version_in_cs', dep_info.version_in_cs)
self.assertEqual(['path0', 'path1'], dep_info.local_paths)
def testUpdateRequiredArgsConflicts(self):
dep_info1 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file1', local_paths=['path0', 'path1'])
dep_info2 = dependency_info.DependencyInfo(
'dep1', 'platform2', 'config_file2', local_paths=['path0', 'path2'])
dep_info3 = dependency_info.DependencyInfo(
'dep2', 'platform1', 'config_file3', local_paths=['path0', 'path3'])
self.assertRaises(ValueError, dep_info1.Update, dep_info2)
self.assertRaises(ValueError, dep_info1.Update, dep_info3)
self.assertRaises(ValueError, dep_info3.Update, dep_info2)
def testUpdateCloudStorageInfoNoVersions(self):
dep_info1 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file1')
dep_info2 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file2', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path')
dep_info3 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file3')
dep_info4 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file4', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path')
dep_info1.Update(dep_info2)
self.assertEqual('cs_bucket', dep_info1.cs_bucket)
self.assertEqual('cs_hash', dep_info1.cs_hash)
self.assertEqual('download_path', dep_info1.download_path)
self.assertEqual('cs_remote_path', dep_info1.cs_remote_path)
self.assertFalse(dep_info1.local_paths)
dep_info1.Update(dep_info3)
self.assertEqual('cs_bucket', dep_info1.cs_bucket)
self.assertEqual('cs_hash', dep_info1.cs_hash)
self.assertEqual('download_path', dep_info1.download_path)
self.assertEqual('cs_remote_path', dep_info1.cs_remote_path)
self.assertFalse(dep_info1.local_paths)
self.assertRaises(ValueError, dep_info1.Update, dep_info4)
def testUpdateCloudStorageInfoWithVersions(self):
dep_info1 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file1')
dep_info2 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file2', cs_bucket='cs_bucket2',
cs_hash='cs_hash2', download_path='download_path2',
cs_remote_path='cs_remote_path2', version_in_cs='2.1.1')
dep_info3 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file3')
dep_info4 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file4', cs_bucket='cs_bucket4',
cs_hash='cs_hash4', download_path='download_path4',
cs_remote_path='cs_remote_path4')
dep_info5 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file5', cs_bucket='cs_bucket5',
cs_hash='cs_hash5', download_path='download_path5',
cs_remote_path='cs_remote_path5')
dep_info1.Update(dep_info2)
self.assertEqual('cs_bucket2', dep_info1.cs_bucket)
self.assertEqual('cs_hash2', dep_info1.cs_hash)
self.assertEqual('download_path2', dep_info1.download_path)
self.assertEqual('cs_remote_path2', dep_info1.cs_remote_path)
self.assertEqual('2.1.1', dep_info1.version_in_cs)
self.assertFalse(dep_info1.local_paths)
dep_info1.Update(dep_info3)
self.assertEqual('cs_bucket2', dep_info1.cs_bucket)
self.assertEqual('cs_hash2', dep_info1.cs_hash)
self.assertEqual('download_path2', dep_info1.download_path)
self.assertEqual('cs_remote_path2', dep_info1.cs_remote_path)
self.assertEqual('2.1.1', dep_info1.version_in_cs)
self.assertFalse(dep_info1.local_paths)
self.assertRaises(ValueError, dep_info1.Update, dep_info4)
self.assertEqual('cs_bucket2', dep_info1.cs_bucket)
self.assertEqual('cs_hash2', dep_info1.cs_hash)
self.assertEqual('download_path2', dep_info1.download_path)
self.assertEqual('cs_remote_path2', dep_info1.cs_remote_path)
self.assertEqual('2.1.1', dep_info1.version_in_cs)
self.assertFalse(dep_info1.local_paths)
self.assertRaises(ValueError, dep_info1.Update, dep_info5)
def testUpdateAllInfo(self):
dep_info1 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file1', local_paths=['path1'])
dep_info2 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file2', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', local_paths=['path2'])
dep_info3 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file3', local_paths=['path3'])
dep_info4 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file4', cs_bucket='cs_bucket',
cs_hash='cs_hash', download_path='download_path',
cs_remote_path='cs_remote_path', local_paths=['path4'])
dep_info1.Update(dep_info2)
self.assertEqual('cs_bucket', dep_info1.cs_bucket)
self.assertEqual('cs_hash', dep_info1.cs_hash)
self.assertEqual('download_path', dep_info1.download_path)
self.assertEqual('cs_remote_path', dep_info1.cs_remote_path)
self.assertEqual(['path1', 'path2'], dep_info1.local_paths)
dep_info1.Update(dep_info3)
self.assertEqual('cs_bucket', dep_info1.cs_bucket)
self.assertEqual('cs_hash', dep_info1.cs_hash)
self.assertEqual('download_path', dep_info1.download_path)
self.assertEqual('cs_remote_path', dep_info1.cs_remote_path)
self.assertEqual(['path1', 'path2', 'path3'], dep_info1.local_paths)
self.assertRaises(ValueError, dep_info1.Update, dep_info4)
def testAppendConflictingLocalFiles(self):
dep_info1 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file1',
local_paths=['path0', 'path1', 'path3', 'path5', 'path6'])
dep_info2 = dependency_info.DependencyInfo(
'dep1', 'platform1', 'config_file2',
local_paths=['path0', 'path2', 'path4', 'path5'])
expected_local_paths = ['path0', 'path1', 'path3', 'path5', 'path6',
'path2', 'path4']
dep_info1.Update(dep_info2)
self.assertEquals(expected_local_paths, dep_info1.local_paths)<|fim▁end|> | self.assertFalse(dep_info.download_path)
self.assertFalse(dep_info.unzip_location)
self.assertFalse(dep_info.path_within_archive)
|
<|file_name|>TestPitchRollCoupling.py<|end_file_name|><|fim▁begin|>from LogAnalyzer import Test,TestResult
import DataflashLog
from VehicleType import VehicleType
import collections
class TestPitchRollCoupling(Test):
'''test for divergence between input and output pitch/roll, i.e. mechanical failure or bad PID tuning'''
# TODO: currently we're only checking for roll/pitch outside of max lean angle, will come back later to analyze roll/pitch in versus out values
def __init__(self):
Test.__init__(self)
self.name = "Pitch/Roll"
self.enable = True # TEMP
def run(self, logdata, verbose):
self.result = TestResult()
self.result.status = TestResult.StatusType.GOOD
if logdata.vehicleType != VehicleType.Copter:
self.result.status = TestResult.StatusType.NA
return
if not "ATT" in logdata.channels:
self.result.status = TestResult.StatusType.UNKNOWN
self.result.statusMessage = "No ATT log data"
return
if not "CTUN" in logdata.channels:
self.result.status = TestResult.StatusType.UNKNOWN
self.result.statusMessage = "No CTUN log data"
return
if "BarAlt" in logdata.channels['CTUN']:
self.ctun_baralt_att = 'BarAlt'
else:
self.ctun_baralt_att = 'BAlt'
# figure out where each mode begins and ends, so we can treat auto and manual modes differently and ignore acro/tune modes
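        # e.g. (illustrative) a switch to LOITER at line 100 and back to STABILIZE at
        # line 250 yields an auto segment (100, 249) and a manual segment (250, lineCount)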
autoModes = ["RTL",
"AUTO",
"LAND",
"LOITER",
"GUIDED",
"CIRCLE",
"OF_LOITER",
"POSHOLD",
"BRAKE",
"AVOID_ADSB",
"GUIDED_NOGPS",
"SMARTRTL"]
# use CTUN RollIn/DesRoll + PitchIn/DesPitch
manualModes = ["STABILIZE", "DRIFT", "ALTHOLD", "ALT_HOLD", "POSHOLD"]
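        # NOTE: "POSHOLD" also appears in autoModes above; since autoModes is
        # checked first, POSHOLD segments end up treated as auto.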
# ignore data from these modes:
        ignoreModes = ["ACRO", "SPORT", "FLIP", "AUTOTUNE", "", "THROW"]
autoSegments = [] # list of (startLine,endLine) pairs
manualSegments = [] # list of (startLine,endLine) pairs
orderedModes = collections.OrderedDict(sorted(logdata.modeChanges.items(), key=lambda t: t[0]))
isAuto = False # we always start in a manual control mode
prevLine = 0
mode = ""
        for line, modepair in orderedModes.items():
mode = modepair[0].upper()
if prevLine == 0:
prevLine = line
if mode in autoModes:
if not isAuto:
manualSegments.append((prevLine,line-1))
prevLine = line
isAuto = True
elif mode in manualModes:
if isAuto:
autoSegments.append((prevLine,line-1))
prevLine = line
isAuto = False
elif mode in ignoreModes:
if isAuto:
autoSegments.append((prevLine,line-1))
else:
manualSegments.append((prevLine,line-1))
prevLine = 0
else:
raise Exception("Unknown mode in TestPitchRollCoupling: %s" % mode)
# and handle the last segment, which doesn't have an ending
if mode in autoModes:
autoSegments.append((prevLine,logdata.lineCount))
elif mode in manualModes:
manualSegments.append((prevLine,logdata.lineCount))
# figure out max lean angle, the ANGLE_MAX param was added in AC3.1
maxLeanAngle = 45.0
if "ANGLE_MAX" in logdata.parameters:
maxLeanAngle = logdata.parameters["ANGLE_MAX"] / 100.0
maxLeanAngleBuffer = 10 # allow a buffer margin
# ignore anything below this altitude, to discard any data while not flying
minAltThreshold = 2.0
# look through manual+auto flight segments
# TODO: filter to ignore single points outside range?
(maxRoll, maxRollLine) = (0.0, 0)
(maxPitch, maxPitchLine) = (0.0, 0)
for (startLine,endLine) in manualSegments+autoSegments:<|fim▁hole|> rollSeg = logdata.channels["ATT"]["Roll"].getSegment(startLine,endLine)
pitchSeg = logdata.channels["ATT"]["Pitch"].getSegment(startLine,endLine)
if not rollSeg.dictData and not pitchSeg.dictData:
continue
# check max roll+pitch for any time where relative altitude is above minAltThreshold
roll = max(abs(rollSeg.min()), abs(rollSeg.max()))
pitch = max(abs(pitchSeg.min()), abs(pitchSeg.max()))
if (roll>(maxLeanAngle+maxLeanAngleBuffer) and abs(roll)>abs(maxRoll)) or (pitch>(maxLeanAngle+maxLeanAngleBuffer) and abs(pitch)>abs(maxPitch)):
lit = DataflashLog.LogIterator(logdata, startLine)
assert(lit.currentLine == startLine)
while lit.currentLine <= endLine:
relativeAlt = lit["CTUN"][self.ctun_baralt_att]
if relativeAlt > minAltThreshold:
roll = lit["ATT"]["Roll"]
pitch = lit["ATT"]["Pitch"]
if abs(roll)>(maxLeanAngle+maxLeanAngleBuffer) and abs(roll)>abs(maxRoll):
maxRoll = roll
maxRollLine = lit.currentLine
if abs(pitch)>(maxLeanAngle+maxLeanAngleBuffer) and abs(pitch)>abs(maxPitch):
maxPitch = pitch
maxPitchLine = lit.currentLine
next(lit)
# check for breaking max lean angles
if maxRoll and abs(maxRoll)>abs(maxPitch):
self.result.status = TestResult.StatusType.FAIL
self.result.statusMessage = "Roll (%.2f, line %d) > maximum lean angle (%.2f)" % (maxRoll, maxRollLine, maxLeanAngle)
return
if maxPitch:
self.result.status = TestResult.StatusType.FAIL
self.result.statusMessage = "Pitch (%.2f, line %d) > maximum lean angle (%.2f)" % (maxPitch, maxPitchLine, maxLeanAngle)
return
# TODO: use numpy/scipy to check Roll+RollIn curves for fitness (ignore where we're not airborne)
# ...<|fim▁end|> | # quick up-front test, only fallover into more complex line-by-line check if max()>threshold |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""
KINCluster is clustering like KIN.
release note:
- version 0.1.6
fix settings
update pipeline<|fim▁hole|> now logging
- version 0.1.5.5
fix using custom settings
        support both module and dict
- version 0.1.5.4
Update tokenizer, remove stopwords eff
- version 0.1.5.3
        now custom settings are available.
see settings.py
- version 0.1.5.2
change item, extractor, pipeline module
        now, pipeline.dress_item passes just the item (extractor.dump)
        fix previous versions' error (too many values to unpack)
"""
__version__ = '0.1.6'
__all__ = ['KINCluster',
'Cluster', 'Extractor', 'Item', 'Pipeline',
'tokenizer', 'stopwords']
from KINCluster.KINCluster import KINCluster
from KINCluster.core.cluster import Cluster
from KINCluster.core.extractor import Extractor
from KINCluster.core.item import Item
from KINCluster.core.pipeline import Pipeline
from KINCluster.lib.tokenizer import tokenizer
from KINCluster.lib.stopwords import stopwords<|fim▁end|> | delete unused arguments
fix convention by pylint |
<|file_name|>all_tests.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import sys
sys.path.extend(['.', '..'])
import unittest
<|fim▁hole|> 'test_c_lexer',
'test_c_ast',
'test_general',
'test_c_parser',
]
)
unittest.TextTestRunner(verbosity=1).run(suite)<|fim▁end|> |
suite = unittest.TestLoader().loadTestsFromNames(
[ |
<|file_name|>enhance.py<|end_file_name|><|fim▁begin|># Rewrite every 'O' concept tag as the word itself prefixed with "$-"; B-/I- tagged lines are kept unchanged
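# Input is one "<token>\t<tag>" pair per line; e.g. "show\tO" becomes
# "show\t$-show" while "north\tB-location" passes through (illustrative sample).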
def changeAllO(file, out):
w = open(out, "w")
for line in (open(file).readlines()):
v = line.split("\t")
if(len(v)>1):
if v[1][0:1] == "I" or v[1][0:1] == "B":
w.write(line)
else:
w.write(v[0] + "\t" + "$-"+str(v[0])+"\n")
else:
w.write("\n")
flag = 0<|fim▁hole|>
changeAllO("TRAIN.txt", "NLSPARQL.train.data")
changeAllO("TEST.txt", "NLSPARQL.test.data")<|fim▁end|> | w.close() |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>__author__ = 'mark'<|fim▁hole|>User Profile Extension based on One-to-One fields code in Django Docs here:
https://docs.djangoproject.com/en/1.7/topics/auth/customizing/
"""
from django.db import models
from django.contrib.auth.models import User
from uuid import uuid4
class Member(models.Model):
user = models.OneToOneField(User)
member_guid = models.CharField(max_length=100, null=True, blank=True)
ext_uid = models.CharField(max_length=100, null=True, blank=True)
user_token = models.CharField(max_length=100, null=True, blank=True)<|fim▁end|> | """ |
<|file_name|>default.rs<|end_file_name|><|fim▁begin|>// Type your code here, or load an example.
pub fn square(num: i32) -> i32 {
num * num
}
<|fim▁hole|>// If you use `main()`, declare it as `pub` to see it in the output:
// pub fn main() { ... }<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from distutils.core import setup
import pygiphy
VERSION = pygiphy.__version__
AUTHOR = pygiphy.__author__
setup_kwargs = {
'name': 'pygiphy',
'version': VERSION,
'url': 'https://github.com/MichaelYusko/PyGiphy',
'license': 'MIT',
'author': AUTHOR,
'author_email': '[email protected]',
'description': 'Python interface for the Giphy API',
'packages': ['pygiphy'],
'classifiers': [
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'License :: OSI Approved :: MIT License'
],
}
requirements = ['requests>=2.13.0']
setup_kwargs['install_requires'] = requirements
setup(**setup_kwargs)
<|fim▁hole|><|fim▁end|> | print(u"\n\n\t\t "
"PyGiphy version {} installation succeeded.\n".format(VERSION)) |
<|file_name|>util.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | # proxy module
from apptools.logger.util import * |
<|file_name|>pbt_memnn_example.py<|end_file_name|><|fim▁begin|>"""Example training a memory neural net on the bAbI dataset.
References Keras and is based off of https://keras.io/examples/babi_memnn/.
"""
from __future__ import print_function
from tensorflow.keras.models import Sequential, Model, load_model
from tensorflow.keras.layers import Embedding
from tensorflow.keras.layers import (Input, Activation, Dense, Permute,
Dropout)
from tensorflow.keras.layers import add, dot, concatenate
from tensorflow.keras.layers import LSTM
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.utils import get_file
from tensorflow.keras.preprocessing.sequence import pad_sequences
from filelock import FileLock
import os
import argparse
import tarfile
import numpy as np
import re
from ray import tune
def tokenize(sent):
"""Return the tokens of a sentence including punctuation.
>>> tokenize("Bob dropped the apple. Where is the apple?")
["Bob", "dropped", "the", "apple", ".", "Where", "is", "the", "apple", "?"]
"""
    return [x.strip() for x in re.split(r"(\W+)", sent) if x and x.strip()]
def parse_stories(lines, only_supporting=False):
"""Parse stories provided in the bAbi tasks format
If only_supporting is true, only the sentences
that support the answer are kept.
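    Each line starts with an integer id; question lines additionally carry a
    tab-separated answer and supporting fact ids, e.g. (illustrative):
        1 Mary moved to the bathroom.
        2 Where is Mary?<TAB>bathroom<TAB>1
    Ids restart at 1 at the beginning of each new story.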
"""
data = []
story = []
for line in lines:
line = line.decode("utf-8").strip()
nid, line = line.split(" ", 1)
nid = int(nid)
if nid == 1:
story = []
if "\t" in line:
q, a, supporting = line.split("\t")
q = tokenize(q)
if only_supporting:
# Only select the related substory
supporting = map(int, supporting.split())
substory = [story[i - 1] for i in supporting]
else:
# Provide all the substories
substory = [x for x in story if x]
data.append((substory, q, a))
story.append("")
else:
sent = tokenize(line)
story.append(sent)
return data
def get_stories(f, only_supporting=False, max_length=None):
"""Given a file name, read the file,
retrieve the stories,
and then convert the sentences into a single story.
If max_length is supplied,
any stories longer than max_length tokens will be discarded.
"""
def flatten(data):
return sum(data, [])
data = parse_stories(f.readlines(), only_supporting=only_supporting)
data = [(flatten(story), q, answer) for story, q, answer in data
if not max_length or len(flatten(story)) < max_length]
return data
def vectorize_stories(word_idx, story_maxlen, query_maxlen, data):
inputs, queries, answers = [], [], []
for story, query, answer in data:
inputs.append([word_idx[w] for w in story])
queries.append([word_idx[w] for w in query])
answers.append(word_idx[answer])
return (pad_sequences(inputs, maxlen=story_maxlen),
pad_sequences(queries, maxlen=query_maxlen), np.array(answers))
def read_data(finish_fast=False):
# Get the file
try:
path = get_file(
"babi-tasks-v1-2.tar.gz",
origin="https://s3.amazonaws.com/text-datasets/"<|fim▁hole|> except Exception:
print(
"Error downloading dataset, please download it manually:\n"
"$ wget http://www.thespermwhale.com/jaseweston/babi/tasks_1-20_v1-2" # noqa: E501
".tar.gz\n"
"$ mv tasks_1-20_v1-2.tar.gz ~/.keras/datasets/babi-tasks-v1-2.tar.gz" # noqa: E501
)
raise
# Choose challenge
challenges = {
# QA1 with 10,000 samples
"single_supporting_fact_10k": "tasks_1-20_v1-2/en-10k/qa1_"
"single-supporting-fact_{}.txt",
# QA2 with 10,000 samples
"two_supporting_facts_10k": "tasks_1-20_v1-2/en-10k/qa2_"
"two-supporting-facts_{}.txt",
}
challenge_type = "single_supporting_fact_10k"
challenge = challenges[challenge_type]
with tarfile.open(path) as tar:
train_stories = get_stories(tar.extractfile(challenge.format("train")))
test_stories = get_stories(tar.extractfile(challenge.format("test")))
if finish_fast:
train_stories = train_stories[:64]
test_stories = test_stories[:64]
return train_stories, test_stories
class MemNNModel(tune.Trainable):
def build_model(self):
"""Helper method for creating the model"""
vocab = set()
for story, q, answer in self.train_stories + self.test_stories:
vocab |= set(story + q + [answer])
vocab = sorted(vocab)
# Reserve 0 for masking via pad_sequences
vocab_size = len(vocab) + 1
story_maxlen = max(
len(x) for x, _, _ in self.train_stories + self.test_stories)
query_maxlen = max(
len(x) for _, x, _ in self.train_stories + self.test_stories)
word_idx = {c: i + 1 for i, c in enumerate(vocab)}
self.inputs_train, self.queries_train, self.answers_train = (
vectorize_stories(word_idx, story_maxlen, query_maxlen,
self.train_stories))
self.inputs_test, self.queries_test, self.answers_test = (
vectorize_stories(word_idx, story_maxlen, query_maxlen,
self.test_stories))
# placeholders
input_sequence = Input((story_maxlen, ))
question = Input((query_maxlen, ))
# encoders
# embed the input sequence into a sequence of vectors
input_encoder_m = Sequential()
input_encoder_m.add(Embedding(input_dim=vocab_size, output_dim=64))
input_encoder_m.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, story_maxlen, embedding_dim)
# embed the input into a sequence of vectors of size query_maxlen
input_encoder_c = Sequential()
input_encoder_c.add(
Embedding(input_dim=vocab_size, output_dim=query_maxlen))
input_encoder_c.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, story_maxlen, query_maxlen)
# embed the question into a sequence of vectors
question_encoder = Sequential()
question_encoder.add(
Embedding(
input_dim=vocab_size, output_dim=64,
input_length=query_maxlen))
question_encoder.add(Dropout(self.config.get("dropout", 0.3)))
# output: (samples, query_maxlen, embedding_dim)
# encode input sequence and questions (which are indices)
# to sequences of dense vectors
input_encoded_m = input_encoder_m(input_sequence)
input_encoded_c = input_encoder_c(input_sequence)
question_encoded = question_encoder(question)
# compute a "match" between the first input vector sequence
# and the question vector sequence
# shape: `(samples, story_maxlen, query_maxlen)`
match = dot([input_encoded_m, question_encoded], axes=(2, 2))
match = Activation("softmax")(match)
# add the match matrix with the second input vector sequence
response = add(
[match, input_encoded_c]) # (samples, story_maxlen, query_maxlen)
response = Permute(
(2, 1))(response) # (samples, query_maxlen, story_maxlen)
# concatenate the match matrix with the question vector sequence
answer = concatenate([response, question_encoded])
# the original paper uses a matrix multiplication.
# we choose to use a RNN instead.
answer = LSTM(32)(answer) # (samples, 32)
# one regularization layer -- more would probably be needed.
answer = Dropout(self.config.get("dropout", 0.3))(answer)
answer = Dense(vocab_size)(answer) # (samples, vocab_size)
# we output a probability distribution over the vocabulary
answer = Activation("softmax")(answer)
# build the final model
model = Model([input_sequence, question], answer)
return model
def setup(self, config):
with FileLock(os.path.expanduser("~/.tune.lock")):
self.train_stories, self.test_stories = read_data(
config["finish_fast"])
model = self.build_model()
rmsprop = RMSprop(
lr=self.config.get("lr", 1e-3), rho=self.config.get("rho", 0.9))
model.compile(
optimizer=rmsprop,
loss="sparse_categorical_crossentropy",
metrics=["accuracy"])
self.model = model
def step(self):
# train
self.model.fit(
[self.inputs_train, self.queries_train],
self.answers_train,
batch_size=self.config.get("batch_size", 32),
epochs=self.config.get("epochs", 1),
validation_data=([self.inputs_test, self.queries_test],
self.answers_test),
verbose=0)
_, accuracy = self.model.evaluate(
[self.inputs_train, self.queries_train],
self.answers_train,
verbose=0)
return {"mean_accuracy": accuracy}
def save_checkpoint(self, checkpoint_dir):
file_path = checkpoint_dir + "/model"
self.model.save(file_path)
return file_path
def load_checkpoint(self, path):
# See https://stackoverflow.com/a/42763323
del self.model
self.model = load_model(path)
if __name__ == "__main__":
import ray
from ray.tune.schedulers import PopulationBasedTraining
parser = argparse.ArgumentParser()
parser.add_argument(
"--smoke-test", action="store_true", help="Finish quickly for testing")
parser.add_argument(
"--server-address",
type=str,
default=None,
required=False,
help="The address of server to connect to if using "
"Ray Client.")
args, _ = parser.parse_known_args()
if args.smoke_test:
ray.init(num_cpus=2)
elif args.server_address:
ray.util.connect(args.server_address)
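    # Population Based Training: every `perturbation_interval` units of
    # `time_attr` (wall-clock seconds by default), underperforming trials are
    # replaced with clones of the top trials, whose hyperparameters listed in
    # `hyperparam_mutations` are then perturbed or resampled, so tuning
    # happens during training itself.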
pbt = PopulationBasedTraining(
perturbation_interval=2,
hyperparam_mutations={
"dropout": lambda: np.random.uniform(0, 1),
"lr": lambda: 10**np.random.randint(-10, 0),
"rho": lambda: np.random.uniform(0, 1)
})
results = tune.run(
MemNNModel,
name="pbt_babi_memnn",
scheduler=pbt,
metric="mean_accuracy",
mode="max",
stop={"training_iteration": 4 if args.smoke_test else 100},
num_samples=2,
config={
"finish_fast": args.smoke_test,
"batch_size": 32,
"epochs": 1,
"dropout": 0.3,
"lr": 0.01,
"rho": 0.9
})<|fim▁end|> | "babi_tasks_1-20_v1-2.tar.gz") |
<|file_name|>test_googlenet.py<|end_file_name|><|fim▁begin|>from skimage.data import coffee, camera
from sklearn_theano.feature_extraction.caffe.googlenet import (
GoogLeNetTransformer, GoogLeNetClassifier)
import numpy as np
from nose import SkipTest
import os
co = coffee().astype(np.float32)
ca = camera().astype(np.float32)[:, :, np.newaxis] * np.ones((1, 1, 3),
dtype='float32')<|fim▁hole|>
def test_googlenet_transformer():
"""smoke test for googlenet transformer"""
if os.environ.get('CI', None) is not None:
raise SkipTest("Skipping heavy data loading on CI")
t = GoogLeNetTransformer()
t.transform(co)
t.transform(ca)
def test_googlenet_classifier():
"""smoke test for googlenet classifier"""
if os.environ.get('CI', None) is not None:
raise SkipTest("Skipping heavy data loading on CI")
c = GoogLeNetClassifier()
c.predict(co)
c.predict(ca)<|fim▁end|> | |
<|file_name|>test_settrie.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# coding: utf8
"""
Unit tests for module PySetTrie (see settrie.py).
Author: Márton Miháltz
https://sites.google.com/site/mmihaltz/
"""
import unittest
from settrie import SetTrie, SetTrieMap, SetTrieMultiMap
class TestSetTrie(unittest.TestCase):
"""
UnitTest for SetTrie class
"""
def setUp(self):
self.t = SetTrie([{1, 3}, {1, 3, 5}, {1, 4}, {1, 2, 4}, {2, 4}, {2, 3, 5}])
def test_print(self):
expected = """None
1
2
4#
3#
5#
4#
2
3
5#
4#
"""
from io import StringIO
outp = StringIO()
self.t.printtree(stream=outp)
self.assertEqual(outp.getvalue(), expected)
def test_iter(self):
a = []
for s in self.t:
a.append(s)
self.assertEqual(a, [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}])
def test_iter2(self):
it = iter(self.t)
for s in it:
pass
self.assertRaises(StopIteration, it.__next__)
def test_iter3(self):
t2 = SetTrie()
it = iter(t2)
self.assertRaises(StopIteration, it.__next__)
def test_aslist(self):
self.assertEqual(self.t.aslist(), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}])
def test_str(self):
self.assertEqual(str(self.t), "[{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}]")
def test_contains(self):
self.assertTrue(self.t.contains( {1, 3} ))
self.assertFalse(self.t.contains( {1} ))
self.assertTrue(self.t.contains( {1, 3, 5} ))
self.assertFalse(self.t.contains( {1, 3, 5, 7} ))
def test_in(self):
self.assertTrue({1, 3} in self.t)
self.assertFalse({1} in self.t)
self.assertTrue({1, 3, 5} in self.t)
self.assertFalse({1, 3, 5, 7} in self.t)
def test_hassuperset(self):
self.assertTrue(self.t.hassuperset({3, 5}))
self.assertFalse(self.t.hassuperset({6}))
self.assertTrue(self.t.hassuperset({1, 2, 4}))
self.assertFalse(self.t.hassuperset({2, 4, 5} ))
def test_supersets(self):
self.assertEqual(self.t.supersets({3, 5}), [{1, 3, 5}, {2, 3, 5}])
self.assertEqual(self.t.supersets({1, 4}), [{1, 2, 4}, {1, 4}])
self.assertEqual(self.t.supersets({1, 3, 5}), [{1, 3, 5}])
self.assertEqual(self.t.supersets({2}), [{1, 2, 4}, {2, 3, 5}, {2, 4}])
self.assertEqual(self.t.supersets({1}), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}])
self.assertEqual(self.t.supersets({1, 2, 5}), [])
self.assertEqual(self.t.supersets({1, 2, 4, 5}), [])
self.assertEqual(self.t.supersets({6}), [])
def test_hassubset(self):
self.assertTrue(self.t.hassubset({1, 2, 3}))
self.assertTrue(self.t.hassubset({2, 3, 4, 5}))
self.assertTrue(self.t.hassubset({1, 4}))
self.assertTrue(self.t.hassubset({2, 3, 5}))
self.assertFalse(self.t.hassubset({3, 4, 5}))
self.assertFalse(self.t.hassubset({6, 7, 8, 9, 1000}))
def test_subsets(self):
self.assertEqual(self.t.subsets({1, 2, 4, 11}), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2, 4}), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2}), [])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}])
self.assertEqual(self.t.subsets({0, 1, 3, 5}), [{1, 3}, {1, 3, 5}])
self.assertEqual(self.t.subsets({1, 2, 5}), [])
self.assertEqual(self.t.subsets({1, 4}), [{1, 4}]) # :)
self.assertEqual(self.t.subsets({1, 3, 5}), [{1, 3}, {1, 3, 5}])
self.assertEqual(self.t.subsets({1, 3, 5, 111}), [{1, 3}, {1, 3, 5}])
self.assertEqual(self.t.subsets({1, 4, 8}), [{1, 4}])
self.assertEqual(self.t.subsets({2, 3, 4, 5}), [{2, 3, 5}, {2, 4}])
self.assertEqual(self.t.subsets({2, 3, 5, 6}), [{2, 3, 5}])
class TestSetTrieMap(unittest.TestCase):
"""
UnitTest for SetTrieMap class
"""
def setUp(self):
self.t = SetTrieMap([({1, 3}, 'A'), ({1, 3, 5}, 'B'), ({1, 4}, 'C'),
({1, 2, 4}, 'D'), ({2, 4}, 'E'), ({2, 3, 5}, 'F')])
#self.t.printtree()
def test_print(self):
expected = """None
1
2
4: 'D'
3: 'A'
5: 'B'
4: 'C'
2
3
5: 'F'
4: 'E'
"""
from io import StringIO
outp = StringIO()
self.t.printtree(stream=outp)
self.assertEqual(outp.getvalue(), expected)
def test_contains(self):
self.assertTrue(self.t.contains( {1, 3} ))
self.assertFalse(self.t.contains( {1} ))
self.assertTrue(self.t.contains( {1, 3, 5} ))
self.assertFalse(self.t.contains( {1, 3, 5, 7} ))
def test_in(self):
self.assertTrue({1, 3} in self.t)
self.assertFalse({1} in self.t)
self.assertTrue({1, 3, 5} in self.t)
self.assertFalse({1, 3, 5, 7} in self.t)
<|fim▁hole|> self.assertEqual(self.t.get({1, 2, 4}), 'D')
self.assertEqual(self.t.get({2, 4}), 'E')
self.assertEqual(self.t.get({2, 3, 5}), 'F')
self.assertEqual(self.t.get({1, 2, 3}), None)
self.assertEqual(self.t.get({100, 101, 102}, 0xDEADBEEF), 0xDEADBEEF)
self.assertEqual(self.t.get({}), None)
def test_assign(self):
self.assertEqual(self.t.get({1, 3}), 'A')
self.t.assign({1, 3}, 'AAA')
self.assertEqual(self.t.get({1, 3}), 'AAA')
self.assertEqual(self.t.get({100, 200}), None)
self.t.assign({100, 200}, 'FOO')
self.assertEqual(self.t.get({100, 200}), 'FOO')
self.setUp()
def test_hassuperset(self):
self.assertTrue(self.t.hassuperset({3, 5}))
self.assertFalse(self.t.hassuperset({6}))
self.assertTrue(self.t.hassuperset({1, 2, 4}))
self.assertFalse(self.t.hassuperset({2, 4, 5} ))
def test_supersets(self):
self.assertEqual(self.t.supersets({3, 5}), [({1, 3, 5}, 'B'), ({2, 3, 5}, 'F')])
self.assertEqual(self.t.supersets({1}), [({1, 2, 4}, 'D'), ({1, 3}, 'A'), ({1, 3, 5}, 'B'), ({1, 4}, 'C')])
self.assertEqual(self.t.supersets({1, 2, 5}), [])
self.assertEqual(self.t.supersets({3, 5}, mode='keys'), [{1, 3, 5}, {2, 3, 5}])
self.assertEqual(self.t.supersets({1}, mode='keys'), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}])
self.assertEqual(self.t.supersets({1, 2, 5}, mode='keys'), [])
self.assertEqual(self.t.supersets({3, 5}, mode='values'), ['B', 'F'])
self.assertEqual(self.t.supersets({1}, mode='values'), ['D', 'A', 'B', 'C'])
self.assertEqual(self.t.supersets({1, 2, 5}, mode='values'), [])
def test_hassubset(self):
self.assertTrue(self.t.hassubset({1, 2, 3}))
self.assertTrue(self.t.hassubset({2, 3, 4, 5}))
self.assertTrue(self.t.hassubset({1, 4}))
self.assertTrue(self.t.hassubset({2, 3, 5}))
self.assertFalse(self.t.hassubset({3, 4, 5}))
self.assertFalse(self.t.hassubset({6, 7, 8, 9, 1000}))
def test_subsets(self):
self.assertEqual(self.t.subsets({1, 2, 4, 11}), [({1, 2, 4}, 'D'), ({1, 4}, 'C'), ({2, 4}, 'E')])
self.assertEqual(self.t.subsets({1, 2, 4}), [({1, 2, 4}, 'D'), ({1, 4}, 'C'), ({2, 4}, 'E')])
self.assertEqual(self.t.subsets({1, 2}), [])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}), [({1, 2, 4}, 'D'),
({1, 3}, 'A'),
({1, 3, 5}, 'B'),
({1, 4}, 'C'),
({2, 3, 5}, 'F'),
({2, 4}, 'E')] )
self.assertEqual(self.t.subsets({0, 1, 3, 5}), [({1, 3}, 'A'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({1, 2, 5}), [])
self.assertEqual(self.t.subsets({1, 2, 4, 11}, mode='keys'), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2, 4}, mode='keys'), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2}, mode='keys'), [])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='keys'), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}])
self.assertEqual(self.t.subsets({0, 1, 3, 5}, mode='keys'), [{1, 3}, {1, 3, 5}])
self.assertEqual(self.t.subsets({1, 2, 5}, mode='keys'), [])
self.assertEqual(self.t.subsets({1, 2, 4, 11}, mode='values'), ['D', 'C', 'E'])
self.assertEqual(self.t.subsets({1, 2, 4}, mode='values'), ['D', 'C', 'E'])
self.assertEqual(self.t.subsets({1, 2}, mode='values'), [])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='values'), ['D', 'A', 'B', 'C', 'F', 'E'])
self.assertEqual(self.t.subsets({0, 1, 3, 5}, mode='values'), ['A', 'B'])
self.assertEqual(self.t.subsets({1, 2, 5}, mode='values'), [])
self.assertEqual(self.t.subsets({1, 4}), [({1, 4}, 'C')])
self.assertEqual(self.t.subsets({1, 3, 5}), [({1, 3}, 'A'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({1, 3, 5, 111}), [({1, 3}, 'A'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({1, 4, 8}), [({1, 4}, 'C')])
self.assertEqual(self.t.subsets({2, 3, 4, 5}), [({2, 3, 5}, 'F'), ({2, 4}, 'E')])
self.assertEqual(self.t.subsets({2, 3, 5, 6}), [({2, 3, 5}, 'F')])
def test_iters(self):
self.assertEqual(self.t.aslist(),
[({1, 2, 4}, 'D'), ({1, 3}, 'A'), ({1, 3, 5}, 'B'), ({1, 4}, 'C'), ({2, 3, 5}, 'F'), ({2, 4}, 'E')] )
self.assertEqual(list(self.t.keys()), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}] )
self.assertEqual(list(self.t.values()), ['D', 'A', 'B', 'C', 'F', 'E'] )
self.assertEqual(list(self.t.__iter__()), list(self.t.keys()))
class TestSetTrieMultiMap(unittest.TestCase):
"""
UnitTest for SetTrieMultiMap class
"""
def setUp(self):
self.t = SetTrieMultiMap([({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B'), ({1, 4}, 'C'), ({1, 4}, 'CC'),
({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({2, 4}, 'E'), ({2, 3, 5}, 'F'),
({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF')])
def test_aslist(self):
self.assertEqual(self.t.aslist(),
[({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B'),
({1, 4}, 'C'), ({1, 4}, 'CC'), ({2, 3, 5}, 'F'), ({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF'), ({2, 4}, 'E')] )
def test_assign_returned_value(self):
x = SetTrieMultiMap()
self.assertEqual(x.assign({1, 3}, 'A'), 1)
self.assertEqual(x.assign({1, 3}, 'AA'), 2)
self.assertEqual(x.assign({1, 3}, 'A'), 3)
self.assertEqual(x.assign({2, 4, 5}, 'Y'), 1)
def test_count(self):
self.assertEqual(self.t.count({1, 3}), 2)
self.assertEqual(self.t.count({1, 3, 5}), 1)
self.assertEqual(self.t.count({1, 3, 4}), 0)
self.assertEqual(self.t.count({111, 222}), 0)
self.assertEqual(self.t.count({2, 3, 5}), 3)
def test_iterget(self):
self.assertEqual(list(self.t.iterget({1, 3})), ['A', 'AA'])
self.assertEqual(list(self.t.iterget({1, 3, 4})), [])
def test_get(self):
self.assertEqual(self.t.get({1, 3}), ['A', 'AA'])
self.assertEqual(self.t.get({1, 2, 4}), ['D', 'DD'])
self.assertEqual(self.t.get({1, 3, 5}), ['B'])
self.assertEqual(self.t.get({2, 3, 5}), ['F', 'FF', 'FFF'])
self.assertEqual(self.t.get({2, 4}), ['E'])
self.assertEqual(self.t.get({1, 3, 4}), None)
self.assertEqual(self.t.get({44}, []), [])
def test_hassuperset(self):
self.assertTrue(self.t.hassuperset({3, 5}))
self.assertFalse(self.t.hassuperset({6}))
self.assertTrue(self.t.hassuperset({1, 2, 4}))
self.assertFalse(self.t.hassuperset({2, 4, 5} ))
def test_supersets(self):
self.assertEqual(self.t.supersets({3, 5}), [({1, 3, 5}, 'B'), ({2, 3, 5}, 'F'), ({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF')])
self.assertEqual(self.t.supersets({3, 5}, mode='values'), ['B', 'F', 'FF', 'FFF'])
self.assertEqual(self.t.supersets({3, 5}, mode='keys'), [{1, 3, 5}, {2, 3, 5}])
self.assertEqual(self.t.supersets({1}), [({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({1, 3}, 'A'),
({1, 3}, 'AA'), ({1, 3, 5}, 'B'), ({1, 4}, 'C'), ({1, 4}, 'CC')] )
self.assertEqual(self.t.supersets({1}, mode='keys'), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}])
self.assertEqual(self.t.supersets({1}, mode='values'), ['D', 'DD', 'A', 'AA', 'B', 'C', 'CC'])
self.assertEqual(self.t.supersets({1, 2, 5}), [])
self.assertEqual(self.t.supersets({1, 2, 5}, mode='keys'), [])
self.assertEqual(self.t.supersets({1, 2, 5}, mode='values'), [])
def test_hassubset(self):
self.assertTrue(self.t.hassubset({1, 2, 3}))
self.assertTrue(self.t.hassubset({2, 3, 4, 5}))
self.assertTrue(self.t.hassubset({1, 4}))
self.assertTrue(self.t.hassubset({2, 3, 5}))
self.assertFalse(self.t.hassubset({3, 4, 5}))
self.assertFalse(self.t.hassubset({6, 7, 8, 9, 1000}))
def test_subsets(self):
self.assertEqual(self.t.subsets({1, 2, 4, 11}), [({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({1, 4}, 'C'),
({1, 4}, 'CC'), ({2, 4}, 'E')] )
self.assertEqual(self.t.subsets({1, 2, 4, 11}, mode='keys'), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2, 4, 11}, mode='values'), ['D', 'DD', 'C', 'CC', 'E'])
self.assertEqual(self.t.subsets({1, 2, 4}), [({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({1, 4}, 'C'), ({1, 4}, 'CC'),
({2, 4}, 'E')])
self.assertEqual(self.t.subsets({1, 2, 4}, mode='keys'), [{1, 2, 4}, {1, 4}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2, 4}, mode='values'), ['D', 'DD', 'C', 'CC', 'E'])
self.assertEqual(self.t.subsets({1, 2}), [])
self.assertEqual(self.t.subsets({1, 2}, mode='keys'), [])
self.assertEqual(self.t.subsets({1, 2}, mode='values'), [])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}),
[({1, 2, 4}, 'D'), ({1, 2, 4}, 'DD'), ({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B'),
({1, 4}, 'C'), ({1, 4}, 'CC'), ({2, 3, 5}, 'F'), ({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF'), ({2, 4}, 'E')] )
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}), self.t.aslist())
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='keys'), list(self.t.keys()))
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='keys'), [{1, 2, 4}, {1, 3}, {1, 3, 5}, {1, 4}, {2, 3, 5}, {2, 4}])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='values'),
['D', 'DD', 'A', 'AA', 'B', 'C', 'CC', 'F', 'FF', 'FFF', 'E'])
self.assertEqual(self.t.subsets({1, 2, 3, 4, 5}, mode='values'), list(self.t.values()))
self.assertEqual(self.t.subsets({0, 1, 3, 5}), [({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({0, 1, 3, 5}, mode='keys'), [{1, 3}, {1, 3, 5}])
self.assertEqual(self.t.subsets({0, 1, 3, 5}, mode='values'), ['A', 'AA', 'B'])
self.assertEqual(self.t.subsets({1, 2, 5}), [])
self.assertEqual(self.t.subsets({1, 2, 5}, mode='keys'), [])
self.assertEqual(self.t.subsets({1, 2, 5}, mode='values'), [])
self.assertEqual(self.t.subsets({1, 4}), [({1, 4}, 'C'), ({1, 4}, 'CC')])
self.assertEqual(self.t.subsets({1, 3, 5}), [({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({1, 3, 5, 111}), [({1, 3}, 'A'), ({1, 3}, 'AA'), ({1, 3, 5}, 'B')])
self.assertEqual(self.t.subsets({1, 4, 8}), [({1, 4}, 'C'), ({1, 4}, 'CC')])
self.assertEqual(self.t.subsets({2, 3, 4, 5}), [({2, 3, 5}, 'F'), ({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF'), ({2, 4}, 'E')])
self.assertEqual(self.t.subsets({2, 3, 5, 6}), [({2, 3, 5}, 'F'), ({2, 3, 5}, 'FF'), ({2, 3, 5}, 'FFF')])
# - - - - - - -
# If module is executed from command line, perform tests:
if __name__ == "__main__":
unittest.main(verbosity=2)<|fim▁end|> | def test_get(self):
self.assertEqual(self.t.get({1, 3}), 'A')
self.assertEqual(self.t.get({1, 3, 5}), 'B')
self.assertEqual(self.t.get({1, 4}), 'C') |
<|file_name|>server.js<|end_file_name|><|fim▁begin|>import express from 'express'
import adminOnly from 'desktop/lib/admin_only'
import { buildServerApp } from 'reaction/Router'
import { routes } from './routes'
import { renderLayout } from '@artsy/stitch'
import { Meta } from './components/Meta'
const app = (module.exports = express())
app.get('/isomorphic-relay-example*', adminOnly, async (req, res, next) => {
try {
const { ServerApp, redirect, status } = await buildServerApp({
routes,
url: req.url,
})
if (redirect) {
res.redirect(302, redirect.url)
return
}
const layout = await renderLayout({
basePath: __dirname,
layout: '../../components/main_layout/templates/react_index.jade',
config: {
styledComponents: true,
},
blocks: {
head: Meta,
body: ServerApp,
},
locals: {<|fim▁hole|> })
res.status(status).send(layout)
} catch (error) {
console.log(error)
next(error)
}
})<|fim▁end|> | ...res.locals,
assetPackage: 'relay',
styledComponents: true,
}, |
<|file_name|>test_macros.rs<|end_file_name|><|fim▁begin|>macro_rules! yo {
($name: expr) => {
print!("hey {}", $name);
};<|fim▁hole|>fn main () {
println!("hello world");
yo!("carlos");
}<|fim▁end|> | }
|
<|file_name|>websocket_test.py<|end_file_name|><|fim▁begin|>from tornado.concurrent import Future
from tornado import gen
from tornado.httpclient import HTTPError
from tornado.log import gen_log
from tornado.testing import AsyncHTTPTestCase, gen_test, bind_unused_port, ExpectLog
from tornado.web import Application, RequestHandler
from tornado.websocket import WebSocketHandler, websocket_connect, WebSocketError
class EchoHandler(WebSocketHandler):
def initialize(self, close_future):
self.close_future = close_future
def on_message(self, message):
self.write_message(message, isinstance(message, bytes))
def on_close(self):
self.close_future.set_result(None)
class NonWebSocketHandler(RequestHandler):
def get(self):
self.write('ok')
class WebSocketTest(AsyncHTTPTestCase):
def get_app(self):
self.close_future = Future()
return Application([
('/echo', EchoHandler, dict(close_future=self.close_future)),
('/non_ws', NonWebSocketHandler),
])
@gen_test
def test_websocket_gen(self):<|fim▁hole|> io_loop=self.io_loop)
ws.write_message('hello')
response = yield ws.read_message()
self.assertEqual(response, 'hello')
def test_websocket_callbacks(self):
websocket_connect(
'ws://localhost:%d/echo' % self.get_http_port(),
io_loop=self.io_loop, callback=self.stop)
ws = self.wait().result()
ws.write_message('hello')
ws.read_message(self.stop)
response = self.wait().result()
self.assertEqual(response, 'hello')
@gen_test
def test_websocket_http_fail(self):
with self.assertRaises(HTTPError) as cm:
yield websocket_connect(
'ws://localhost:%d/notfound' % self.get_http_port(),
io_loop=self.io_loop)
self.assertEqual(cm.exception.code, 404)
@gen_test
def test_websocket_http_success(self):
with self.assertRaises(WebSocketError):
yield websocket_connect(
'ws://localhost:%d/non_ws' % self.get_http_port(),
io_loop=self.io_loop)
@gen_test
def test_websocket_network_fail(self):
sock, port = bind_unused_port()
sock.close()
with self.assertRaises(HTTPError) as cm:
with ExpectLog(gen_log, ".*"):
yield websocket_connect(
'ws://localhost:%d/' % port,
io_loop=self.io_loop,
connect_timeout=0.01)
self.assertEqual(cm.exception.code, 599)
@gen_test
def test_websocket_close_buffered_data(self):
ws = yield websocket_connect(
'ws://localhost:%d/echo' % self.get_http_port())
ws.write_message('hello')
ws.write_message('world')
ws.stream.close()
yield self.close_future<|fim▁end|> | ws = yield websocket_connect(
'ws://localhost:%d/echo' % self.get_http_port(), |
<|file_name|>ui.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Generic values for UI properties.
use std::fmt::{self, Write};
use style_traits::cursor::CursorKind;
use style_traits::{CssWriter, ToCss};
/// A generic value for the `cursor` property.
///
/// https://drafts.csswg.org/css-ui/#cursor
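/// For example, `cursor: url(hand.cur) 2 2, pointer;` tries the image cursor
/// (with a 2,2 hotspot) first and falls back to the `pointer` keyword.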
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]<|fim▁hole|> /// The kind of the cursor [default | help | ...].
pub keyword: CursorKind,
}
impl<Image> Cursor<Image> {
/// Set `cursor` to `auto`
#[inline]
pub fn auto() -> Self {
Self {
images: vec![].into_boxed_slice(),
keyword: CursorKind::Auto,
}
}
}
impl<Image: ToCss> ToCss for Cursor<Image> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
for image in &*self.images {
image.to_css(dest)?;
dest.write_str(", ")?;
}
self.keyword.to_css(dest)
}
}
/// A generic value for item of `image cursors`.
#[derive(Clone, Debug, MallocSizeOf, PartialEq, SpecifiedValueInfo, ToComputedValue)]
pub struct CursorImage<ImageUrl, Number> {
/// The url to parse images from.
pub url: ImageUrl,
/// The <x> and <y> coordinates.
pub hotspot: Option<(Number, Number)>,
}
impl<ImageUrl: ToCss, Number: ToCss> ToCss for CursorImage<ImageUrl, Number> {
fn to_css<W>(&self, dest: &mut CssWriter<W>) -> fmt::Result
where
W: Write,
{
self.url.to_css(dest)?;
if let Some((ref x, ref y)) = self.hotspot {
dest.write_str(" ")?;
x.to_css(dest)?;
dest.write_str(" ")?;
y.to_css(dest)?;
}
Ok(())
}
}
/// A generic value for `scrollbar-color` property.
///
/// https://drafts.csswg.org/css-scrollbars-1/#scrollbar-color
#[derive(
Animate,
Clone,
ComputeSquaredDistance,
Copy,
Debug,
MallocSizeOf,
PartialEq,
SpecifiedValueInfo,
ToAnimatedValue,
ToAnimatedZero,
ToComputedValue,
ToCss,
)]
pub enum ScrollbarColor<Color> {
/// `auto`
Auto,
/// `<color>{2}`
Colors {
/// First `<color>`, for color of the scrollbar thumb.
thumb: Color,
/// Second `<color>`, for color of the scrollbar track.
track: Color,
},
}
impl<Color> Default for ScrollbarColor<Color> {
#[inline]
fn default() -> Self {
ScrollbarColor::Auto
}
}<|fim▁end|> | pub struct Cursor<Image> {
/// The parsed images for the cursor.
pub images: Box<[Image]>, |
<|file_name|>read_manifest.rs<|end_file_name|><|fim▁begin|>use std::env;
use std::error::Error;
use cargo::core::{Package, Source};
use cargo::util::{CliResult, CliError, Config};
use cargo::util::important_paths::{find_root_manifest_for_cwd};
use cargo::sources::{PathSource};
#[derive(RustcDecodable)]
struct Options {
flag_manifest_path: Option<String>,
flag_color: Option<String>,
}
pub const USAGE: &'static str = "
Usage:
cargo read-manifest [options]
cargo read-manifest -h | --help
Options:
-h, --help Print this message
-v, --verbose Use verbose output
--manifest-path PATH Path to the manifest to compile
--color WHEN Coloring: auto, always, never
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<Package>> {
debug!("executing; cmd=cargo-read-manifest; args={:?}",
env::args().collect::<Vec<_>>());
try!(config.shell().set_color_config(options.flag_color.as_ref().map(|s| &s[..])));
let root = try!(find_root_manifest_for_cwd(options.flag_manifest_path));
let mut source = try!(PathSource::for_path(root.parent().unwrap(), config).map_err(|e| {
CliError::new(e.description(), 1)
}));<|fim▁hole|>
source.root_package()
.map(|pkg| Some(pkg))
.map_err(|err| CliError::from_boxed(err, 1))
}<|fim▁end|> |
try!(source.update().map_err(|err| CliError::new(err.description(), 1))); |
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>use snafu::Snafu;
/// Represents an error during serialization/deserialization process
#[derive(Debug, Snafu)]
pub enum Error {
#[snafu(display("Wrong encoding"))]
WrongEncoding {},
#[snafu(display("{}", source))]
#[snafu(context(false))]
UnknownSpecVersion {
source: crate::event::UnknownSpecVersion,
},
#[snafu(display("Unknown attribute in this spec version: {}", name))]
UnknownAttribute { name: String },
#[snafu(display("Error while building the final event: {}", source))]
#[snafu(context(false))]
EventBuilderError {
source: crate::event::EventBuilderError,
},
#[snafu(display("Error while parsing a time string: {}", source))]
#[snafu(context(false))]
ParseTimeError { source: chrono::ParseError },
#[snafu(display("Error while parsing a url: {}", source))]
#[snafu(context(false))]
ParseUrlError { source: url::ParseError },
#[snafu(display("Error while decoding base64: {}", source))]
#[snafu(context(false))]
Base64DecodingError { source: base64::DecodeError },
#[snafu(display("Error while serializing/deserializing to json: {}", source))]
#[snafu(context(false))]
SerdeJsonError { source: serde_json::Error },<|fim▁hole|> IOError { source: std::io::Error },
#[snafu(display("Other error: {}", source))]
Other {
source: Box<dyn std::error::Error + Send + Sync>,
},
}
/// Result type alias for return values during serialization/deserialization process
pub type Result<T> = std::result::Result<T, Error>;<|fim▁end|> | #[snafu(display("IO Error: {}", source))]
#[snafu(context(false))] |
<|file_name|>symlinkAtomic.ts<|end_file_name|><|fim▁begin|>import ava from 'ava';
import { promises as fs } from 'fs';
import { tempFile, tempFileLoc, tempDir, tempDirLoc } from './lib';
import * as nextra from '../src';
ava('File', async (test): Promise<void> => {
test.plan(2);
const file = tempFile();
const newFile = tempFileLoc();
const retVal = await nextra.symlinkAtomic(file, newFile);
const stats = await fs.lstat(newFile);
test.is(retVal, undefined);
test.true(stats.isSymbolicLink());
});
ava('Directory', async (test): Promise<void> => {
test.plan(2);
const dir = tempDir();
const newDir = tempDirLoc();
const retVal = await nextra.symlinkAtomic(dir, newDir);
const stats = await fs.lstat(newDir);
test.is(retVal, undefined);<|fim▁hole|><|fim▁end|> | test.true(stats.isSymbolicLink());
}); |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian
// Licensed under the MIT License <LICENSE.md>
//! FFI bindings to mspbase.
#![no_std]
#![experimental]
extern crate winapi;
use winapi::*;<|fim▁hole|>extern "system" {
}<|fim▁end|> | |
<|file_name|>fontloader.mdl.js<|end_file_name|><|fim▁begin|>/* global WebFont */<|fim▁hole|>(function () {
'use strict';
function FontLoaderFactory () {
return {
setFonts : function () {
WebFont.load({
custom: {
families: [ 'FontAwesome','Ubuntu','Oxygen','Open Sans' ],
urls: [ '/fonts/base.css']
}
});
}
};
}
angular.module('app.core.fontloader', [])
.factory('FontLoader',FontLoaderFactory);
})();<|fim▁end|> | |
<|file_name|>BuyAction.cpp<|end_file_name|><|fim▁begin|>#include "botpch.h"
#include "../../playerbot.h"
#include "BuyAction.h"
#include "../ItemVisitors.h"
#include "../values/ItemCountValue.h"
using namespace ai;
bool BuyAction::Execute(Event event)
{
string link = event.getParam();
ItemIds itemIds = chat->parseItems(link);
if (itemIds.empty())
return false;
<|fim▁hole|> Player* master = GetMaster();
if (!master)
return false;
ObjectGuid vendorguid = master->GetSelectionGuid();
if (!vendorguid)
return false;
Creature *pCreature = bot->GetNPCIfCanInteractWith(vendorguid,UNIT_NPC_FLAG_VENDOR);
if (!pCreature)
{
ai->TellMaster("Cannot talk to vendor");
return false;
}
VendorItemData const* tItems = pCreature->GetVendorItems();
if (!tItems)
{
ai->TellMaster("This vendor has no items");
return false;
}
for (ItemIds::iterator i = itemIds.begin(); i != itemIds.end(); i++)
{
for (uint32 slot = 0; slot < tItems->GetItemCount(); slot++)
{
if (tItems->GetItem(slot)->item == *i)
{
bot->BuyItemFromVendor(vendorguid, *i, 1, NULL_BAG, NULL_SLOT);
ai->TellMaster("Bought item");
}
}
}
return true;
}<|fim▁end|> | |
<|file_name|>execute_action.cc<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/file_system_provider/operations/execute_action.h"
#include <algorithm>
#include <string>
#include "chrome/common/extensions/api/file_system_provider.h"
#include "chrome/common/extensions/api/file_system_provider_internal.h"
namespace chromeos {
namespace file_system_provider {
namespace operations {
ExecuteAction::ExecuteAction(
extensions::EventRouter* event_router,
const ProvidedFileSystemInfo& file_system_info,
const base::FilePath& entry_path,
const std::string& action_id,
const storage::AsyncFileUtil::StatusCallback& callback)
: Operation(event_router, file_system_info),
entry_path_(entry_path),
action_id_(action_id),
callback_(callback) {<|fim▁hole|>
bool ExecuteAction::Execute(int request_id) {
using extensions::api::file_system_provider::ExecuteActionRequestedOptions;
ExecuteActionRequestedOptions options;
options.file_system_id = file_system_info_.file_system_id();
options.request_id = request_id;
options.entry_path = entry_path_.AsUTF8Unsafe();
options.action_id = action_id_;
return SendEvent(
request_id,
extensions::events::FILE_SYSTEM_PROVIDER_ON_EXECUTE_ACTION_REQUESTED,
extensions::api::file_system_provider::OnExecuteActionRequested::
kEventName,
extensions::api::file_system_provider::OnExecuteActionRequested::Create(
options));
}
void ExecuteAction::OnSuccess(int /* request_id */,
scoped_ptr<RequestValue> result,
bool has_more) {
callback_.Run(base::File::FILE_OK);
}
void ExecuteAction::OnError(int /* request_id */,
scoped_ptr<RequestValue> /* result */,
base::File::Error error) {
callback_.Run(error);
}
} // namespace operations
} // namespace file_system_provider
} // namespace chromeos<|fim▁end|> | }
ExecuteAction::~ExecuteAction() {
} |
<|file_name|>checkstyle.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright<|fim▁hole|># * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
from webkitpy.common.system.executive import ScriptError
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
from webkitpy.common.system.deprecated_logging import error
class CheckStyle(AbstractStep):
@classmethod
def options(cls):
return AbstractStep.options() + [
Options.non_interactive,
Options.check_style,
Options.git_commit,
]
def run(self, state):
if not self._options.check_style:
return
os.chdir(self._tool.scm().checkout_root)
args = []
if self._options.git_commit:
args.append("--git-commit")
args.append(self._options.git_commit)
args.append("--diff-files")
args.extend(self._changed_files(state))
try:
self._tool.executive.run_and_throw_if_fail(self._tool.port().check_webkit_style_command() + args)
except ScriptError, e:
if self._options.non_interactive:
# We need to re-raise the exception here to have the
# style-queue do the right thing.
raise e
if not self._tool.user.confirm("Are you sure you want to continue?"):
exit(1)<|fim▁end|> | # notice, this list of conditions and the following disclaimer. |
<|file_name|>internal-model.js<|end_file_name|><|fim▁begin|>import merge from "ember-data/system/merge";
import RootState from "ember-data/system/model/states";
import Relationships from "ember-data/system/relationships/state/create";
import Snapshot from "ember-data/system/snapshot";
import EmptyObject from "ember-data/system/empty-object";
var Promise = Ember.RSVP.Promise;
var get = Ember.get;
var set = Ember.set;
var _extractPivotNameCache = new EmptyObject();
var _splitOnDotCache = new EmptyObject();
function splitOnDot(name) {
return _splitOnDotCache[name] || (
_splitOnDotCache[name] = name.split('.')
);
}
function extractPivotName(name) {
return _extractPivotNameCache[name] || (
_extractPivotNameCache[name] = splitOnDot(name)[0]
);
}
function retrieveFromCurrentState(key) {
return function() {
return get(this.currentState, key);
};
}
var guid = 0;
/**
`InternalModel` is the Model class that we use internally inside Ember Data to represent models.
  Internal ED methods should only deal with `InternalModel` objects. It is a fast, plain JavaScript class.
We expose `DS.Model` to application code, by materializing a `DS.Model` from `InternalModel` lazily, as
a performance optimization.
`InternalModel` should never be exposed to application code. At the boundaries of the system, in places
like `find`, `push`, etc. we convert between Models and InternalModels.
We need to make sure that the properties from `InternalModel` are correctly exposed/proxied on `Model`
if they are needed.
@class InternalModel
*/
export default function InternalModel(type, id, store, container, data) {
this.type = type;
this.id = id;
this.store = store;
this.container = container;
this._data = data || new EmptyObject();
this.modelName = type.modelName;
this.dataHasInitialized = false;
//Look into making this lazy
this._deferredTriggers = [];
this._attributes = new EmptyObject();
this._inFlightAttributes = new EmptyObject();
this._relationships = new Relationships(this);
this._recordArrays = undefined;
this.currentState = RootState.empty;
this.isReloading = false;
this.isError = false;
this.error = null;
this[Ember.GUID_KEY] = guid++ + 'internal-model';
/*
implicit relationships are relationship which have not been declared but the inverse side exists on
another record somewhere
For example if there was
```app/models/comment.js
import DS from 'ember-data';
export default DS.Model.extend({
name: DS.attr()
})
```
but there is also
```app/models/post.js
import DS from 'ember-data';
export default DS.Model.extend({
name: DS.attr(),
comments: DS.hasMany('comment')
})
```
  would have an implicit post relationship in order to do things like removing ourselves from the post
when we are deleted
*/
this._implicitRelationships = new EmptyObject();
}
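// Illustrative sketch (hypothetical ids, not part of this file): the store
// materializes the DS.Model lazily from its InternalModel, e.g.
//
//   var internalModel = store._internalModelForId('post', '1');
//   var post = internalModel.getRecord(); // materializes on first access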
InternalModel.prototype = {
isEmpty: retrieveFromCurrentState('isEmpty'),
isLoading: retrieveFromCurrentState('isLoading'),
isLoaded: retrieveFromCurrentState('isLoaded'),
hasDirtyAttributes: retrieveFromCurrentState('hasDirtyAttributes'),
isSaving: retrieveFromCurrentState('isSaving'),
isDeleted: retrieveFromCurrentState('isDeleted'),
isNew: retrieveFromCurrentState('isNew'),
isValid: retrieveFromCurrentState('isValid'),
dirtyType: retrieveFromCurrentState('dirtyType'),
constructor: InternalModel,
materializeRecord: function() {
    Ember.assert("Materialized " + this.modelName + " record with id: " + this.id + " more than once", this.record === null || this.record === undefined);
// lookupFactory should really return an object that creates
// instances with the injections applied
this.record = this.type._create({
store: this.store,
container: this.container,
_internalModel: this,
currentState: get(this, 'currentState'),
isError: this.isError,
adapterError: this.error
});
this._triggerDeferredTriggers();
},
recordObjectWillDestroy: function() {
this.record = null;
},
deleteRecord: function() {
this.send('deleteRecord');
},
save: function(options) {
var promiseLabel = "DS: Model#save " + this;
var resolver = Ember.RSVP.defer(promiseLabel);
this.store.scheduleSave(this, resolver, options);
return resolver.promise;
},
startedReloading: function() {
this.isReloading = true;
if (this.record) {
set(this.record, 'isReloading', true);
}
},
finishedReloading: function() {
this.isReloading = false;
if (this.record) {
set(this.record, 'isReloading', false);
}
},
reload: function() {
this.startedReloading();
var record = this;
var promiseLabel = "DS: Model#reload of " + this;
return new Promise(function(resolve) {
record.send('reloadRecord', resolve);
}, promiseLabel).then(function() {
record.didCleanError();
return record;
}, function(error) {
record.didError(error);
throw error;
}, "DS: Model#reload complete, update flags").finally(function () {
record.finishedReloading();
record.updateRecordArrays();
});
},
getRecord: function() {
if (!this.record) {
this.materializeRecord();
}
return this.record;
},
unloadRecord: function() {
this.send('unloadRecord');
},
eachRelationship: function(callback, binding) {
return this.type.eachRelationship(callback, binding);
},
eachAttribute: function(callback, binding) {
return this.type.eachAttribute(callback, binding);
},
inverseFor: function(key) {
return this.type.inverseFor(key);
},
setupData: function(data) {
var changedKeys = this._changedKeys(data.attributes);
merge(this._data, data.attributes);
this.pushedData();
if (this.record) {
this.record._notifyProperties(changedKeys);
}
this.didInitalizeData();
},
becameReady: function() {
Ember.run.schedule('actions', this.store.recordArrayManager, this.store.recordArrayManager.recordWasLoaded, this);
},
didInitalizeData: function() {
if (!this.dataHasInitialized) {
this.becameReady();
this.dataHasInitialized = true;
}
},
destroy: function() {
if (this.record) {
return this.record.destroy();
}
},
/**
@method createSnapshot
@private
*/
createSnapshot: function(options) {
var adapterOptions = options && options.adapterOptions;
var snapshot = new Snapshot(this);
snapshot.adapterOptions = adapterOptions;
return snapshot;
},
/**
@method loadingData
@private
@param {Promise} promise
*/
loadingData: function(promise) {
this.send('loadingData', promise);
},
/**
@method loadedData
@private
*/
loadedData: function() {
this.send('loadedData');
this.didInitalizeData();
},
/**
@method notFound
@private
*/
notFound: function() {
this.send('notFound');
},
/**
@method pushedData
@private
*/
pushedData: function() {
this.send('pushedData');
},
flushChangedAttributes: function() {
this._inFlightAttributes = this._attributes;
this._attributes = new EmptyObject();
},
/**
@method adapterWillCommit
@private
*/
adapterWillCommit: function() {
this.send('willCommit');
},
/**
@method adapterDidDirty
@private
*/
adapterDidDirty: function() {
this.send('becomeDirty');
this.updateRecordArraysLater();
},
/**
@method send
@private
@param {String} name
@param {Object} context
*/
send: function(name, context) {
var currentState = get(this, 'currentState');
if (!currentState[name]) {
this._unhandledEvent(currentState, name, context);
}
return currentState[name](this, context);
},
notifyHasManyAdded: function(key, record, idx) {
if (this.record) {
this.record.notifyHasManyAdded(key, record, idx);
}
},
notifyHasManyRemoved: function(key, record, idx) {
if (this.record) {
this.record.notifyHasManyRemoved(key, record, idx);
}
},
notifyBelongsToChanged: function(key, record) {
if (this.record) {
this.record.notifyBelongsToChanged(key, record);
}
},
notifyPropertyChange: function(key) {
if (this.record) {
this.record.notifyPropertyChange(key);
}
},
rollbackAttributes: function() {
var dirtyKeys = Object.keys(this._attributes);
this._attributes = new EmptyObject();
if (get(this, 'isError')) {
this._inFlightAttributes = new EmptyObject();
this.didCleanError();
}
//Eventually rollback will always work for relationships
//For now we support it only out of deleted state, because we
//have an explicit way of knowing when the server acked the relationship change
if (this.isDeleted()) {
//TODO: Should probably move this to the state machine somehow
this.becameReady();
}
if (this.isNew()) {
this.clearRelationships();
}
if (this.isValid()) {
this._inFlightAttributes = new EmptyObject();
}
this.send('rolledBack');
this.record._notifyProperties(dirtyKeys);
},
/**
@method transitionTo
@private
@param {String} name
*/
transitionTo: function(name) {
// POSSIBLE TODO: Remove this code and replace with
// always having direct reference to state objects
var pivotName = extractPivotName(name);
var currentState = get(this, 'currentState');
var state = currentState;
do {
if (state.exit) { state.exit(this); }
state = state.parentState;
} while (!state.hasOwnProperty(pivotName));
var path = splitOnDot(name);
var setups = [];
var enters = [];
var i, l;
for (i=0, l=path.length; i<l; i++) {
state = state[path[i]];
if (state.enter) { enters.push(state); }
if (state.setup) { setups.push(state); }
}
for (i=0, l=enters.length; i<l; i++) {
enters[i].enter(this);
}
set(this, 'currentState', state);
//TODO Consider whether this is the best approach for keeping these two in sync
if (this.record) {
set(this.record, 'currentState', state);
}
for (i=0, l=setups.length; i<l; i++) {
setups[i].setup(this);
}
this.updateRecordArraysLater();
},
_unhandledEvent: function(state, name, context) {
var errorMessage = "Attempted to handle event `" + name + "` ";
errorMessage += "on " + String(this) + " while in state ";
errorMessage += state.stateName + ". ";
if (context !== undefined) {
errorMessage += "Called with " + Ember.inspect(context) + ".";
}
throw new Ember.Error(errorMessage);
},
triggerLater: function() {
var length = arguments.length;
var args = new Array(length);
for (var i = 0; i < length; i++) {
args[i] = arguments[i];
}
if (this._deferredTriggers.push(args) !== 1) {
return;
}
Ember.run.scheduleOnce('actions', this, '_triggerDeferredTriggers');
},
_triggerDeferredTriggers: function() {
//TODO: Before 1.0 we want to remove all the events that happen on the pre materialized record,
//but for now, we queue up all the events triggered before the record was materialized, and flush
//them once we have the record
if (!this.record) {
return;
}
for (var i=0, l= this._deferredTriggers.length; i<l; i++) {
this.record.trigger.apply(this.record, this._deferredTriggers[i]);
}
this._deferredTriggers.length = 0;
},
/**
@method clearRelationships
@private
*/
clearRelationships: function() {
this.eachRelationship((name, relationship) => {
if (this._relationships.has(name)) {
var rel = this._relationships.get(name);
rel.clear();
rel.destroy();
}
});
Object.keys(this._implicitRelationships).forEach((key) => {
this._implicitRelationships[key].clear();
this._implicitRelationships[key].destroy();
});
},
/**
When a find request is triggered on the store, the user can optionally pass in
attributes and relationships to be preloaded. These are meant to behave as if they
came back from the server, except the user obtained them out of band and is informing
the store of their existence. The most common use case is for supporting client side
nested URLs, such as `/posts/1/comments/2` so the user can do
`store.find('comment', 2, {post:1})` without having to fetch the post.
Preloaded data can be attributes and relationships passed in either as IDs or as actual
models.
@method _preloadData
@private
@param {Object} preload
*/
_preloadData: function(preload) {
//TODO(Igor) consider the polymorphic case
Object.keys(preload).forEach((key) => {
var preloadValue = get(preload, key);
var relationshipMeta = this.type.metaForProperty(key);
if (relationshipMeta.isRelationship) {
this._preloadRelationship(key, preloadValue);
} else {
this._data[key] = preloadValue;
}
});
},
_preloadRelationship: function(key, preloadValue) {
var relationshipMeta = this.type.metaForProperty(key);
var type = relationshipMeta.type;
if (relationshipMeta.kind === 'hasMany') {
this._preloadHasMany(key, preloadValue, type);
} else {
this._preloadBelongsTo(key, preloadValue, type);
}
},
_preloadHasMany: function(key, preloadValue, type) {
Ember.assert("You need to pass in an array to set a hasMany property on a record", Ember.isArray(preloadValue));
var internalModel = this;
var recordsToSet = preloadValue.map((recordToPush) => {
return internalModel._convertStringOrNumberIntoInternalModel(recordToPush, type);
});
//We use the pathway of setting the hasMany as if it came from the adapter
    //because the user told us that they know this relationship exists already
this._relationships.get(key).updateRecordsFromAdapter(recordsToSet);
},
_preloadBelongsTo: function(key, preloadValue, type) {
var recordToSet = this._convertStringOrNumberIntoInternalModel(preloadValue, type);
    //We use the pathway of setting the belongsTo as if it came from the adapter
    //because the user told us that they know this relationship exists already
this._relationships.get(key).setRecord(recordToSet);
},
_convertStringOrNumberIntoInternalModel: function(value, type) {
if (typeof value === 'string' || typeof value === 'number') {
return this.store._internalModelForId(type, value);
}
if (value._internalModel) {
return value._internalModel;
}
return value;
},
/**
@method updateRecordArrays
@private
*/
updateRecordArrays: function() {
this._updatingRecordArraysLater = false;
this.store.dataWasUpdated(this.type, this);
},
setId: function(id) {
Ember.assert('A record\'s id cannot be changed once it is in the loaded state', this.id === null || this.id === id || this.isNew());
this.id = id;
},
didError: function(error) {
this.error = error;
this.isError = true;
if (this.record) {
this.record.setProperties({
isError: true,
adapterError: error
});
}
},
didCleanError: function() {
this.error = null;
this.isError = false;
if (this.record) {
this.record.setProperties({
isError: false,
adapterError: null
});
}
},
/**
If the adapter did not return a hash in response to a commit,
merge the changed attributes and relationships into the existing
saved data.
@method adapterDidCommit
*/
adapterDidCommit: function(data) {
if (data) {
data = data.attributes;
}
this.didCleanError();
var changedKeys = this._changedKeys(data);
merge(this._data, this._inFlightAttributes);
if (data) {
merge(this._data, data);
}
<|fim▁hole|> this._inFlightAttributes = new EmptyObject();
this.send('didCommit');
this.updateRecordArraysLater();
if (!data) { return; }
this.record._notifyProperties(changedKeys);
},
/**
@method updateRecordArraysLater
@private
*/
updateRecordArraysLater: function() {
    // quick hack (something like this could be pushed into run.once)
if (this._updatingRecordArraysLater) { return; }
this._updatingRecordArraysLater = true;
Ember.run.schedule('actions', this, this.updateRecordArrays);
},
addErrorMessageToAttribute: function(attribute, message) {
var record = this.getRecord();
get(record, 'errors').add(attribute, message);
},
removeErrorMessageFromAttribute: function(attribute) {
var record = this.getRecord();
get(record, 'errors').remove(attribute);
},
clearErrorMessages: function() {
var record = this.getRecord();
get(record, 'errors').clear();
},
// FOR USE DURING COMMIT PROCESS
/**
@method adapterDidInvalidate
@private
*/
adapterDidInvalidate: function(errors) {
var attribute;
for (attribute in errors) {
if (errors.hasOwnProperty(attribute)) {
this.addErrorMessageToAttribute(attribute, errors[attribute]);
}
}
this._saveWasRejected();
},
/**
@method adapterDidError
@private
*/
adapterDidError: function(error) {
this.send('becameError');
this.didError(error);
this._saveWasRejected();
},
_saveWasRejected: function() {
var keys = Object.keys(this._inFlightAttributes);
for (var i=0; i < keys.length; i++) {
if (this._attributes[keys[i]] === undefined) {
this._attributes[keys[i]] = this._inFlightAttributes[keys[i]];
}
}
this._inFlightAttributes = new EmptyObject();
},
/**
Ember Data has 3 buckets for storing the value of an attribute on an internalModel.
`_data` holds all of the attributes that have been acknowledged by
a backend via the adapter. When rollbackAttributes is called on a model all
attributes will revert to the record's state in `_data`.
`_attributes` holds any change the user has made to an attribute
that has not been acknowledged by the adapter. Any values in
  `_attributes` have priority over values in `_data`.
`_inFlightAttributes`. When a record is being synced with the
backend the values in `_attributes` are copied to
`_inFlightAttributes`. This way if the backend acknowledges the
  save but does not return the new state, Ember Data can copy the
  values from `_inFlightAttributes` to `_data` without having to
  worry about changes made to `_attributes` while the save was
  happening.
Changed keys builds a list of all of the values that may have been
changed by the backend after a successful save.
It does this by iterating over each key, value pair in the payload
returned from the server after a save. If the `key` is found in
  `_attributes` then the user has a local change to the attribute
that has not been synced with the server and the key is not
included in the list of changed keys.
  If the value for a key differs from the value in what Ember Data
believes to be the truth about the backend state (A merger of the
`_data` and `_inFlightAttributes` objects where
`_inFlightAttributes` has priority) then that means the backend
has updated the value and the key is added to the list of changed
keys.
@method _changedKeys
@private
*/
_changedKeys: function(updates) {
var changedKeys = [];
if (updates) {
var original, i, value, key;
var keys = Object.keys(updates);
var length = keys.length;
original = merge(new EmptyObject(), this._data);
original = merge(original, this._inFlightAttributes);
for (i = 0; i < length; i++) {
key = keys[i];
value = updates[key];
// A value in _attributes means the user has a local change to
      // this attribute. We never override this value when merging
      // updates from the backend so we should not send a change
// notification if the server value differs from the original.
if (this._attributes[key] !== undefined) {
continue;
}
if (!Ember.isEqual(original[key], value)) {
changedKeys.push(key);
}
}
}
return changedKeys;
},
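  // Worked example for _changedKeys (hypothetical values, not from the source):
  //   _data               = { title: 'old', body: 'x' }
  //   _inFlightAttributes = { title: 'new' }
  //   _attributes         = { body: 'draft' }
  // A payload of { title: 'newer', body: 'y' } yields ['title'];
  // 'body' is skipped because the user still has an unsaved local change to it.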
toString: function() {
if (this.record) {
return this.record.toString();
} else {
return `<${this.modelName}:${this.id}>`;
}
}
};<|fim▁end|> | |
<|file_name|>config.rs<|end_file_name|><|fim▁begin|>use toml;
use std::fs::File;
use std::io::BufReader;
use std::io::prelude::*;<|fim▁hole|>#[derive(Debug, Deserialize)]
pub struct Config {
pub uplink: Uplink,
pub plugins: Option<Vec<Plugin>>,
}
#[derive(Debug, Deserialize)]
pub struct Uplink {
pub ip: String,
pub port: i32,
pub protocol: String,
pub hostname: String,
pub description: String,
pub send_pass: String,
pub recv_pass: String,
pub numeric: Option<String>,
}
#[derive(Debug, Deserialize)]
pub struct Plugin {
pub file: String,
pub load: Option<bool>,
}
pub fn get_protocol() -> Result<String, Box<::std::error::Error>> {
let file = File::open("etc/nero.toml")?;
let mut buf_reader = BufReader::new(file);
let mut contents = String::new();
buf_reader.read_to_string(&mut contents)?;
let cfg: Config = toml::from_str(&contents)?;
Ok(cfg.uplink.protocol)
}
pub fn load() -> Result<Result<Config, toml::de::Error>, ::std::io::Error> {
let file = File::open("etc/nero.toml")?;
let mut buf_reader = BufReader::new(file);
let mut contents = String::new();
buf_reader.read_to_string(&mut contents)?;
Ok(toml::from_str(&contents))
}<|fim▁end|> | |
<|file_name|>htmlscriptelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::document_loader::LoadType;
use crate::dom::attr::Attr;
use crate::dom::bindings::codegen::Bindings::DocumentBinding::DocumentMethods;
use crate::dom::bindings::codegen::Bindings::HTMLScriptElementBinding::HTMLScriptElementMethods;
use crate::dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::DomObject;
use crate::dom::bindings::root::{Dom, DomRoot};
use crate::dom::bindings::settings_stack::AutoEntryScript;
use crate::dom::bindings::str::{DOMString, USVString};
use crate::dom::document::Document;
use crate::dom::element::{
cors_setting_for_element, reflect_cross_origin_attribute, set_cross_origin_attribute,
};
use crate::dom::element::{AttributeMutation, Element, ElementCreator};
use crate::dom::event::{Event, EventBubbles, EventCancelable, EventStatus};
use crate::dom::globalscope::GlobalScope;
use crate::dom::htmlelement::HTMLElement;
use crate::dom::node::{document_from_node, window_from_node};
use crate::dom::node::{BindContext, ChildrenMutation, CloneChildrenFlag, Node};
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::virtualmethods::VirtualMethods;
use crate::fetch::create_a_potential_cors_request;
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use crate::script_module::fetch_inline_module_script;
use crate::script_module::{fetch_external_module_script, ModuleOwner};
use content_security_policy as csp;
use dom_struct::dom_struct;
use encoding_rs::Encoding;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsval::UndefinedValue;
use msg::constellation_msg::PipelineId;
use net_traits::request::{CorsSettings, CredentialsMode, Destination, Referrer, RequestBuilder};
use net_traits::ReferrerPolicy;
use net_traits::{FetchMetadata, FetchResponseListener, Metadata, NetworkError};
use net_traits::{ResourceFetchTiming, ResourceTimingType};
use servo_atoms::Atom;
use servo_url::ImmutableOrigin;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::fs::{create_dir_all, read_to_string, File};
use std::io::{Read, Seek, Write};
use std::path::PathBuf;
use std::process::Command;
use std::sync::{Arc, Mutex};
use style::str::{StaticStringVec, HTML_SPACE_CHARACTERS};
use uuid::Uuid;
/// A unique id for each script element.
#[derive(Clone, Copy, Debug, Eq, Hash, JSTraceable, PartialEq)]
pub struct ScriptId(Uuid);
#[dom_struct]
pub struct HTMLScriptElement {
htmlelement: HTMLElement,
/// <https://html.spec.whatwg.org/multipage/#already-started>
already_started: Cell<bool>,
/// <https://html.spec.whatwg.org/multipage/#parser-inserted>
parser_inserted: Cell<bool>,
/// <https://html.spec.whatwg.org/multipage/#non-blocking>
///
/// (currently unused)
non_blocking: Cell<bool>,
/// Document of the parser that created this element
parser_document: Dom<Document>,
    /// Line number at which this element was created
line_number: u64,
/// Unique id for each script element
#[ignore_malloc_size_of = "Defined in uuid"]
id: ScriptId,
}
impl HTMLScriptElement {
fn new_inherited(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> HTMLScriptElement {
HTMLScriptElement {
id: ScriptId(Uuid::new_v4()),
htmlelement: HTMLElement::new_inherited(local_name, prefix, document),
already_started: Cell::new(false),
parser_inserted: Cell::new(creator.is_parser_created()),
non_blocking: Cell::new(!creator.is_parser_created()),
parser_document: Dom::from_ref(document),
line_number: creator.return_line_number(),
}
}
#[allow(unrooted_must_root)]
pub fn new(
local_name: LocalName,
prefix: Option<Prefix>,
document: &Document,
creator: ElementCreator,
) -> DomRoot<HTMLScriptElement> {
Node::reflect_node(
Box::new(HTMLScriptElement::new_inherited(
local_name, prefix, document, creator,
)),
document,
)
}
pub fn get_script_id(&self) -> ScriptId {
self.id.clone()
}
}
/// Supported script types as defined by
/// <https://html.spec.whatwg.org/multipage/#javascript-mime-type>.
pub static SCRIPT_JS_MIMES: StaticStringVec = &[
"application/ecmascript",
"application/javascript",
"application/x-ecmascript",
"application/x-javascript",
"text/ecmascript",
"text/javascript",
"text/javascript1.0",
"text/javascript1.1",
"text/javascript1.2",
"text/javascript1.3",
"text/javascript1.4",
"text/javascript1.5",
"text/jscript",
"text/livescript",
"text/x-ecmascript",
"text/x-javascript",
];
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub enum ScriptType {
Classic,
Module,
}
#[derive(JSTraceable, MallocSizeOf)]
pub struct ScriptOrigin {
text: DOMString,
url: ServoUrl,
external: bool,
type_: ScriptType,
}
impl ScriptOrigin {
pub fn internal(text: DOMString, url: ServoUrl, type_: ScriptType) -> ScriptOrigin {
ScriptOrigin {
text: text,
url: url,
external: false,
type_,
}
}
pub fn external(text: DOMString, url: ServoUrl, type_: ScriptType) -> ScriptOrigin {
ScriptOrigin {
text: text,
url: url,
external: true,
type_,
}
}
pub fn text(&self) -> DOMString {
self.text.clone()
}
}
pub type ScriptResult = Result<ScriptOrigin, NetworkError>;
/// The context required for asynchronously loading an external script source.
struct ClassicContext {
/// The element that initiated the request.
elem: Trusted<HTMLScriptElement>,
/// The kind of external script.
kind: ExternalScriptKind,
/// The (fallback) character encoding argument to the "fetch a classic
/// script" algorithm.
character_encoding: &'static Encoding,
/// The response body received to date.
data: Vec<u8>,
/// The response metadata received to date.
metadata: Option<Metadata>,
/// The initial URL requested.
url: ServoUrl,
/// Indicates whether the request failed, and why
status: Result<(), NetworkError>,
/// Timing object for this resource
resource_timing: ResourceFetchTiming,
}
impl FetchResponseListener for ClassicContext {
fn process_request_body(&mut self) {} // TODO(KiChjang): Perhaps add custom steps to perform fetch here?
fn process_request_eof(&mut self) {} // TODO(KiChjang): Perhaps add custom steps to perform fetch here?
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
self.metadata = metadata.ok().map(|meta| match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { unsafe_, .. } => unsafe_,
});
let status_code = self
.metadata
.as_ref()
.and_then(|m| match m.status {
Some((c, _)) => Some(c),
_ => None,
})
.unwrap_or(0);
self.status = match status_code {
0 => Err(NetworkError::Internal(
"No http status code received".to_owned(),
)),
200..=299 => Ok(()), // HTTP ok status codes
_ => Err(NetworkError::Internal(format!(
"HTTP error code {}",
status_code
))),
};
}
fn process_response_chunk(&mut self, mut chunk: Vec<u8>) {
if self.status.is_ok() {
self.data.append(&mut chunk);
}
}
/// <https://html.spec.whatwg.org/multipage/#fetch-a-classic-script>
/// step 4-9
fn process_response_eof(&mut self, response: Result<ResourceFetchTiming, NetworkError>) {
// Step 5.
let load = response.and(self.status.clone()).map(|_| {
let metadata = self.metadata.take().unwrap();
// Step 6.
let encoding = metadata
.charset
.and_then(|encoding| Encoding::for_label(encoding.as_bytes()))
.unwrap_or(self.character_encoding);
// Step 7.
let (source_text, _, _) = encoding.decode(&self.data);
ScriptOrigin::external(
DOMString::from(source_text),
metadata.final_url,
ScriptType::Classic,
)
});
// Step 9.
// https://html.spec.whatwg.org/multipage/#prepare-a-script
// Step 18.6 (When the chosen algorithm asynchronously completes).
let elem = self.elem.root();
let document = document_from_node(&*elem);
match self.kind {
ExternalScriptKind::Asap => document.asap_script_loaded(&elem, load),
ExternalScriptKind::AsapInOrder => document.asap_in_order_script_loaded(&elem, load),
ExternalScriptKind::Deferred => document.deferred_script_loaded(&elem, load),
ExternalScriptKind::ParsingBlocking => {
document.pending_parsing_blocking_script_loaded(&elem, load)
},
}
document.finish_load(LoadType::Script(self.url.clone()));
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for ClassicContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
let initiator_type = InitiatorType::LocalName(
self.elem
.root()
.upcast::<Element>()
.local_name()
.to_string(),
);
(initiator_type, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
document_from_node(&*self.elem.root()).global()
}
}
impl PreInvoke for ClassicContext {}
/// Steps 1-2 of <https://html.spec.whatwg.org/multipage/#fetch-a-classic-script>
// This function is also used to prefetch a script in `script::dom::servoparser::prefetch`.
pub(crate) fn script_fetch_request(
url: ServoUrl,
cors_setting: Option<CorsSettings>,
origin: ImmutableOrigin,
pipeline_id: PipelineId,
referrer: Referrer,
referrer_policy: Option<ReferrerPolicy>,
integrity_metadata: String,
) -> RequestBuilder {
create_a_potential_cors_request(url, Destination::Script, cors_setting, None)
.origin(origin)
.pipeline_id(Some(pipeline_id))
.referrer(Some(referrer))
.referrer_policy(referrer_policy)
.integrity_metadata(integrity_metadata)
}
/// <https://html.spec.whatwg.org/multipage/#fetch-a-classic-script>
fn fetch_a_classic_script(
script: &HTMLScriptElement,
kind: ExternalScriptKind,
url: ServoUrl,
cors_setting: Option<CorsSettings>,
integrity_metadata: String,
character_encoding: &'static Encoding,
) {
let doc = document_from_node(script);
// Step 1, 2.
let request = script_fetch_request(
url.clone(),
cors_setting,
doc.origin().immutable().clone(),
script.global().pipeline_id(),
Referrer::ReferrerUrl(doc.url()),
doc.get_referrer_policy(),
integrity_metadata,
);
// TODO: Step 3, Add custom steps to perform fetch
let context = Arc::new(Mutex::new(ClassicContext {
elem: Trusted::new(script),
kind: kind,
character_encoding: character_encoding,
data: vec![],
metadata: None,
url: url.clone(),
status: Ok(()),
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
}));
let (action_sender, action_receiver) = ipc::channel().unwrap();
let (task_source, canceller) = doc
.window()
.task_manager()
.networking_task_source_with_canceller();
let listener = NetworkListener {
context,
task_source,
canceller: Some(canceller),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
doc.fetch_async(LoadType::Script(url), request, action_sender);
}
impl HTMLScriptElement {
/// <https://html.spec.whatwg.org/multipage/#prepare-a-script>
pub fn prepare(&self) {
// Step 1.
if self.already_started.get() {
return;
}
// Step 2.
let was_parser_inserted = self.parser_inserted.get();
self.parser_inserted.set(false);
// Step 3.
let element = self.upcast::<Element>();
let r#async = element.has_attribute(&local_name!("async"));
// Note: confusingly, this is done if the element does *not* have an "async" attribute.
if was_parser_inserted && !r#async {
self.non_blocking.set(true);
}
// Step 4-5.
let text = self.Text();
if text.is_empty() && !element.has_attribute(&local_name!("src")) {
return;
}
// Step 6.
if !self.upcast::<Node>().is_connected() {
return;
}
let script_type = if let Some(ty) = self.get_script_type() {
ty
} else {
// Step 7.
return;
};
// Step 8.
if was_parser_inserted {
self.parser_inserted.set(true);
self.non_blocking.set(false);
}
// Step 9.
self.already_started.set(true);
// Step 10.
let doc = document_from_node(self);
if self.parser_inserted.get() && &*self.parser_document != &*doc {
return;
}
// Step 11.
if !doc.is_scripting_enabled() {
return;
}
// Step 12
if element.has_attribute(&local_name!("nomodule")) && script_type == ScriptType::Classic {
return;
}<|fim▁hole|> // Step 13.
if !element.has_attribute(&local_name!("src")) &&
doc.should_elements_inline_type_behavior_be_blocked(
&element,
csp::InlineCheckType::Script,
&text,
) == csp::CheckResult::Blocked
{
warn!("Blocking inline script due to CSP");
return;
}
// Step 14.
if script_type == ScriptType::Classic {
let for_attribute = element.get_attribute(&ns!(), &local_name!("for"));
let event_attribute = element.get_attribute(&ns!(), &local_name!("event"));
match (for_attribute, event_attribute) {
(Some(ref for_attribute), Some(ref event_attribute)) => {
let for_value = for_attribute.value().to_ascii_lowercase();
let for_value = for_value.trim_matches(HTML_SPACE_CHARACTERS);
if for_value != "window" {
return;
}
let event_value = event_attribute.value().to_ascii_lowercase();
let event_value = event_value.trim_matches(HTML_SPACE_CHARACTERS);
if event_value != "onload" && event_value != "onload()" {
return;
}
},
(_, _) => (),
}
}
// Step 15.
let encoding = element
.get_attribute(&ns!(), &local_name!("charset"))
.and_then(|charset| Encoding::for_label(charset.value().as_bytes()))
.unwrap_or_else(|| doc.encoding());
// Step 16.
let cors_setting = cors_setting_for_element(element);
// Step 17.
let credentials_mode = match script_type {
ScriptType::Classic => None,
ScriptType::Module => Some(reflect_cross_origin_attribute(element).map_or(
CredentialsMode::CredentialsSameOrigin,
|attr| match &*attr {
"use-credentials" => CredentialsMode::Include,
"anonymous" => CredentialsMode::CredentialsSameOrigin,
_ => CredentialsMode::CredentialsSameOrigin,
},
)),
};
// TODO: Step 18: Nonce.
// Step 19: Integrity metadata.
let im_attribute = element.get_attribute(&ns!(), &local_name!("integrity"));
let integrity_val = im_attribute.as_ref().map(|a| a.value());
let integrity_metadata = match integrity_val {
Some(ref value) => &***value,
None => "",
};
// TODO: Step 20: referrer policy
// TODO: Step 21: parser state.
// TODO: Step 22: Fetch options
// TODO: Step 23: environment settings object.
let base_url = doc.base_url();
if let Some(src) = element.get_attribute(&ns!(), &local_name!("src")) {
// Step 24.
// Step 24.1.
let src = src.value();
// Step 24.2.
if src.is_empty() {
self.queue_error_event();
return;
}
            // Step 24.3: The "from an external file" flag is stored in ScriptOrigin.
// Step 24.4-24.5.
let url = match base_url.join(&src) {
Ok(url) => url,
Err(_) => {
warn!("error parsing URL for script {}", &**src);
self.queue_error_event();
return;
},
};
// Step 24.6.
match script_type {
ScriptType::Classic => {
// Preparation for step 26.
let kind = if element.has_attribute(&local_name!("defer")) &&
was_parser_inserted &&
!r#async
{
// Step 26.a: classic, has src, has defer, was parser-inserted, is not async.
ExternalScriptKind::Deferred
} else if was_parser_inserted && !r#async {
// Step 26.c: classic, has src, was parser-inserted, is not async.
ExternalScriptKind::ParsingBlocking
} else if !r#async && !self.non_blocking.get() {
// Step 26.d: classic, has src, is not async, is not non-blocking.
ExternalScriptKind::AsapInOrder
} else {
// Step 26.f: classic, has src.
ExternalScriptKind::Asap
};
// Step 24.6.
fetch_a_classic_script(
self,
kind,
url,
cors_setting,
integrity_metadata.to_owned(),
encoding,
);
// Step 23.
match kind {
ExternalScriptKind::Deferred => doc.add_deferred_script(self),
ExternalScriptKind::ParsingBlocking => {
doc.set_pending_parsing_blocking_script(self, None)
},
ExternalScriptKind::AsapInOrder => doc.push_asap_in_order_script(self),
ExternalScriptKind::Asap => doc.add_asap_script(self),
}
},
ScriptType::Module => {
fetch_external_module_script(
ModuleOwner::Window(Trusted::new(self)),
url.clone(),
Destination::Script,
integrity_metadata.to_owned(),
credentials_mode.unwrap(),
);
if !r#async && was_parser_inserted {
doc.add_deferred_script(self);
} else if !r#async && !self.non_blocking.get() {
doc.push_asap_in_order_script(self);
} else {
doc.add_asap_script(self);
};
},
}
} else {
// Step 25.
assert!(!text.is_empty());
// Step 25-1. & 25-2.
let result = Ok(ScriptOrigin::internal(
text.clone(),
base_url.clone(),
script_type.clone(),
));
// Step 25-2.
match script_type {
ScriptType::Classic => {
if was_parser_inserted &&
doc.get_current_parser()
.map_or(false, |parser| parser.script_nesting_level() <= 1) &&
doc.get_script_blocking_stylesheets_count() > 0
{
// Step 26.h: classic, has no src, was parser-inserted, is blocked on stylesheet.
doc.set_pending_parsing_blocking_script(self, Some(result));
} else {
// Step 26.i: otherwise.
self.execute(result);
}
},
ScriptType::Module => {
// We should add inline module script elements
// into those vectors in case that there's no
// descendants in the inline module script.
if !r#async && was_parser_inserted {
doc.add_deferred_script(self);
} else if !r#async && !self.non_blocking.get() {
doc.push_asap_in_order_script(self);
} else {
doc.add_asap_script(self);
};
fetch_inline_module_script(
ModuleOwner::Window(Trusted::new(self)),
text.clone(),
base_url.clone(),
self.id.clone(),
credentials_mode.unwrap(),
);
},
}
}
}
fn unminify_js(&self, script: &mut ScriptOrigin) {
if !self.parser_document.window().unminify_js() {
return;
}
// Write the minified code to a temporary file and pass its path as an argument
// to js-beautify to read from. Meanwhile, redirect the process' stdout into
// another temporary file and read that into a string. This avoids some hangs
// observed on macOS when using direct input/output pipes with very large
// unminified content.
let (input, output) = (tempfile::NamedTempFile::new(), tempfile::tempfile());
if let (Ok(mut input), Ok(mut output)) = (input, output) {
input.write_all(script.text.as_bytes()).unwrap();
match Command::new("js-beautify")
.arg(input.path())
.stdout(output.try_clone().unwrap())
.status()
{
Ok(status) if status.success() => {
let mut script_content = String::new();
output.seek(std::io::SeekFrom::Start(0)).unwrap();
output.read_to_string(&mut script_content).unwrap();
script.text = DOMString::from(script_content);
},
_ => {
warn!("Failed to execute js-beautify. Will store unmodified script");
},
}
} else {
warn!("Error creating input and output files for unminify");
}
let path;
match window_from_node(self).unminified_js_dir() {
Some(unminified_js_dir) => path = PathBuf::from(unminified_js_dir),
None => {
warn!("Unminified script directory not found");
return;
},
}
let (base, has_name) = match script.url.as_str().ends_with("/") {
true => (
path.join(&script.url[url::Position::BeforeHost..])
.as_path()
.to_owned(),
false,
),
false => (
path.join(&script.url[url::Position::BeforeHost..])
.parent()
.unwrap()
.to_owned(),
true,
),
};
match create_dir_all(base.clone()) {
Ok(()) => debug!("Created base dir: {:?}", base),
Err(e) => {
debug!("Failed to create base dir: {:?}, {:?}", base, e);
return;
},
}
let path = if script.external && has_name {
// External script.
path.join(&script.url[url::Position::BeforeHost..])
} else {
// Inline script or url ends with '/'
base.join(Uuid::new_v4().to_string())
};
debug!("script will be stored in {:?}", path);
match File::create(&path) {
Ok(mut file) => file.write_all(script.text.as_bytes()).unwrap(),
Err(why) => warn!("Could not store script {:?}", why),
}
}
fn substitute_with_local_script(&self, script: &mut ScriptOrigin) {
if self
.parser_document
.window()
.local_script_source()
.is_none() ||
!script.external
{
return;
}
let mut path = PathBuf::from(
self.parser_document
.window()
.local_script_source()
.clone()
.unwrap(),
);
path = path.join(&script.url[url::Position::BeforeHost..]);
debug!("Attempting to read script stored at: {:?}", path);
match read_to_string(path.clone()) {
Ok(local_script) => {
debug!("Found script stored at: {:?}", path);
script.text = DOMString::from(local_script);
},
Err(why) => warn!("Could not restore script from file {:?}", why),
}
}
/// <https://html.spec.whatwg.org/multipage/#execute-the-script-block>
pub fn execute(&self, result: ScriptResult) {
// Step 1.
let doc = document_from_node(self);
if self.parser_inserted.get() && &*doc != &*self.parser_document {
return;
}
let mut script = match result {
// Step 2.
Err(e) => {
warn!("error loading script {:?}", e);
self.dispatch_error_event();
return;
},
Ok(script) => script,
};
if script.type_ == ScriptType::Classic {
self.unminify_js(&mut script);
self.substitute_with_local_script(&mut script);
}
// Step 3.
let neutralized_doc = if script.external || script.type_ == ScriptType::Module {
debug!("loading external script, url = {}", script.url);
let doc = document_from_node(self);
doc.incr_ignore_destructive_writes_counter();
Some(doc)
} else {
None
};
// Step 4.
let document = document_from_node(self);
let old_script = document.GetCurrentScript();
match script.type_ {
ScriptType::Classic => document.set_current_script(Some(self)),
ScriptType::Module => document.set_current_script(None),
}
match script.type_ {
ScriptType::Classic => {
self.run_a_classic_script(&script);
document.set_current_script(old_script.as_deref());
},
ScriptType::Module => {
assert!(document.GetCurrentScript().is_none());
self.run_a_module_script(&script, false);
},
}
// Step 5.
if let Some(doc) = neutralized_doc {
doc.decr_ignore_destructive_writes_counter();
}
// Step 6.
if script.external {
self.dispatch_load_event();
}
}
// https://html.spec.whatwg.org/multipage/#run-a-classic-script
pub fn run_a_classic_script(&self, script: &ScriptOrigin) {
// TODO use a settings object rather than this element's document/window
// Step 2
let document = document_from_node(self);
if !document.is_fully_active() || !document.is_scripting_enabled() {
return;
}
// Steps 4-10
let window = window_from_node(self);
let line_number = if script.external {
1
} else {
self.line_number as u32
};
rooted!(in(*window.get_cx()) let mut rval = UndefinedValue());
let global = window.upcast::<GlobalScope>();
global.evaluate_script_on_global_with_result(
&script.text,
script.url.as_str(),
rval.handle_mut(),
line_number,
);
}
#[allow(unsafe_code)]
/// https://html.spec.whatwg.org/multipage/#run-a-module-script
pub fn run_a_module_script(&self, script: &ScriptOrigin, _rethrow_errors: bool) {
// TODO use a settings object rather than this element's document/window
// Step 2
let document = document_from_node(self);
if !document.is_fully_active() || !document.is_scripting_enabled() {
return;
}
// Step 4
let window = window_from_node(self);
let global = window.upcast::<GlobalScope>();
let _aes = AutoEntryScript::new(&global);
if script.external {
let module_map = global.get_module_map().borrow();
if let Some(module_tree) = module_map.get(&script.url) {
// Step 6.
{
let module_error = module_tree.get_error().borrow();
if module_error.is_some() {
module_tree.report_error(&global);
return;
}
}
let module_record = module_tree.get_record().borrow();
if let Some(record) = &*module_record {
let evaluated = module_tree.execute_module(global, record.handle());
if let Err(exception) = evaluated {
module_tree.set_error(Some(exception.clone()));
module_tree.report_error(&global);
return;
}
}
}
} else {
let inline_module_map = global.get_inline_module_map().borrow();
if let Some(module_tree) = inline_module_map.get(&self.id.clone()) {
// Step 6.
{
let module_error = module_tree.get_error().borrow();
if module_error.is_some() {
module_tree.report_error(&global);
return;
}
}
let module_record = module_tree.get_record().borrow();
if let Some(record) = &*module_record {
let evaluated = module_tree.execute_module(global, record.handle());
if let Err(exception) = evaluated {
module_tree.set_error(Some(exception.clone()));
module_tree.report_error(&global);
return;
}
}
}
}
}
pub fn queue_error_event(&self) {
let window = window_from_node(self);
window
.task_manager()
.dom_manipulation_task_source()
.queue_simple_event(self.upcast(), atom!("error"), &window);
}
pub fn dispatch_load_event(&self) {
self.dispatch_event(
atom!("load"),
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
);
}
pub fn dispatch_error_event(&self) {
self.dispatch_event(
atom!("error"),
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
);
}
// https://html.spec.whatwg.org/multipage/#prepare-a-script Step 7.
pub fn get_script_type(&self) -> Option<ScriptType> {
let element = self.upcast::<Element>();
let type_attr = element.get_attribute(&ns!(), &local_name!("type"));
let language_attr = element.get_attribute(&ns!(), &local_name!("language"));
let script_type = match (
type_attr.as_ref().map(|t| t.value()),
language_attr.as_ref().map(|l| l.value()),
) {
(Some(ref ty), _) if ty.is_empty() => {
debug!("script type empty, inferring js");
Some(ScriptType::Classic)
},
(None, Some(ref lang)) if lang.is_empty() => {
                debug!("script language empty, inferring js");
Some(ScriptType::Classic)
},
(None, None) => {
                debug!("no script type or language given, inferring js");
Some(ScriptType::Classic)
},
(None, Some(ref lang)) => {
debug!("script language={}", &***lang);
let language = format!("text/{}", &***lang);
if SCRIPT_JS_MIMES.contains(&language.to_ascii_lowercase().as_str()) {
Some(ScriptType::Classic)
} else {
None
}
},
(Some(ref ty), _) => {
debug!("script type={}", &***ty);
                if &***ty == "module" {
return Some(ScriptType::Module);
}
if SCRIPT_JS_MIMES
.contains(&ty.to_ascii_lowercase().trim_matches(HTML_SPACE_CHARACTERS))
{
Some(ScriptType::Classic)
} else {
None
}
},
};
// https://github.com/rust-lang/rust/issues/21114
script_type
}
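    // Illustrative resolution of get_script_type (hypothetical attribute values):
    //   type=""            -> Some(ScriptType::Classic)
    //   type="module"      -> Some(ScriptType::Module)
    //   language="jscript" -> Some(ScriptType::Classic) (via "text/jscript")
    //   type="text/plain"  -> None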
pub fn set_parser_inserted(&self, parser_inserted: bool) {
self.parser_inserted.set(parser_inserted);
}
pub fn get_parser_inserted(&self) -> bool {
self.parser_inserted.get()
}
pub fn set_already_started(&self, already_started: bool) {
self.already_started.set(already_started);
}
pub fn get_non_blocking(&self) -> bool {
self.non_blocking.get()
}
fn dispatch_event(
&self,
type_: Atom,
bubbles: EventBubbles,
cancelable: EventCancelable,
) -> EventStatus {
let window = window_from_node(self);
let event = Event::new(window.upcast(), type_, bubbles, cancelable);
event.fire(self.upcast())
}
}
impl VirtualMethods for HTMLScriptElement {
fn super_type(&self) -> Option<&dyn VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &dyn VirtualMethods)
}
fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
self.super_type().unwrap().attribute_mutated(attr, mutation);
match *attr.local_name() {
local_name!("src") => {
if let AttributeMutation::Set(_) = mutation {
if !self.parser_inserted.get() && self.upcast::<Node>().is_connected() {
self.prepare();
}
}
},
_ => {},
}
}
fn children_changed(&self, mutation: &ChildrenMutation) {
if let Some(ref s) = self.super_type() {
s.children_changed(mutation);
}
if !self.parser_inserted.get() && self.upcast::<Node>().is_connected() {
self.prepare();
}
}
fn bind_to_tree(&self, context: &BindContext) {
if let Some(ref s) = self.super_type() {
s.bind_to_tree(context);
}
if context.tree_connected && !self.parser_inserted.get() {
let script = Trusted::new(self);
document_from_node(self).add_delayed_task(task!(ScriptDelayedInitialize: move || {
script.root().prepare();
}));
}
}
fn cloning_steps(
&self,
copy: &Node,
maybe_doc: Option<&Document>,
clone_children: CloneChildrenFlag,
) {
if let Some(ref s) = self.super_type() {
s.cloning_steps(copy, maybe_doc, clone_children);
}
// https://html.spec.whatwg.org/multipage/#already-started
if self.already_started.get() {
copy.downcast::<HTMLScriptElement>()
.unwrap()
.set_already_started(true);
}
}
}
impl HTMLScriptElementMethods for HTMLScriptElement {
// https://html.spec.whatwg.org/multipage/#dom-script-src
make_url_getter!(Src, "src");
// https://html.spec.whatwg.org/multipage/#dom-script-src
make_url_setter!(SetSrc, "src");
// https://html.spec.whatwg.org/multipage/#dom-script-type
make_getter!(Type, "type");
// https://html.spec.whatwg.org/multipage/#dom-script-type
make_setter!(SetType, "type");
// https://html.spec.whatwg.org/multipage/#dom-script-charset
make_getter!(Charset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-script-charset
make_setter!(SetCharset, "charset");
// https://html.spec.whatwg.org/multipage/#dom-script-async
fn Async(&self) -> bool {
self.non_blocking.get() ||
self.upcast::<Element>()
.has_attribute(&local_name!("async"))
}
// https://html.spec.whatwg.org/multipage/#dom-script-async
fn SetAsync(&self, value: bool) {
self.non_blocking.set(false);
self.upcast::<Element>()
.set_bool_attribute(&local_name!("async"), value);
}
// https://html.spec.whatwg.org/multipage/#dom-script-defer
make_bool_getter!(Defer, "defer");
// https://html.spec.whatwg.org/multipage/#dom-script-defer
make_bool_setter!(SetDefer, "defer");
// https://html.spec.whatwg.org/multipage/#dom-script-nomodule
make_bool_getter!(NoModule, "nomodule");
// https://html.spec.whatwg.org/multipage/#dom-script-nomodule
make_bool_setter!(SetNoModule, "nomodule");
// https://html.spec.whatwg.org/multipage/#dom-script-integrity
make_getter!(Integrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-script-integrity
make_setter!(SetIntegrity, "integrity");
// https://html.spec.whatwg.org/multipage/#dom-script-event
make_getter!(Event, "event");
// https://html.spec.whatwg.org/multipage/#dom-script-event
make_setter!(SetEvent, "event");
// https://html.spec.whatwg.org/multipage/#dom-script-htmlfor
make_getter!(HtmlFor, "for");
// https://html.spec.whatwg.org/multipage/#dom-script-htmlfor
make_setter!(SetHtmlFor, "for");
// https://html.spec.whatwg.org/multipage/#dom-script-crossorigin
fn GetCrossOrigin(&self) -> Option<DOMString> {
reflect_cross_origin_attribute(self.upcast::<Element>())
}
// https://html.spec.whatwg.org/multipage/#dom-script-crossorigin
fn SetCrossOrigin(&self, value: Option<DOMString>) {
set_cross_origin_attribute(self.upcast::<Element>(), value);
}
// https://html.spec.whatwg.org/multipage/#dom-script-text
fn Text(&self) -> DOMString {
self.upcast::<Node>().child_text_content()
}
// https://html.spec.whatwg.org/multipage/#dom-script-text
fn SetText(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value))
}
}
#[derive(Clone, Copy)]
enum ExternalScriptKind {
Deferred,
ParsingBlocking,
AsapInOrder,
Asap,
}<|fim▁end|> | |
<|file_name|>github.py<|end_file_name|><|fim▁begin|>"""
Github Authentication
"""
import httplib2
from django.conf import settings
from django.core.mail import send_mail
from oauth2client.client import OAuth2WebServerFlow
from helios_auth import utils
# some parameters to indicate that status updating is not possible
STATUS_UPDATES = False
# display tweaks
LOGIN_MESSAGE = "Log in with GitHub"
def get_flow(redirect_url=None):
return OAuth2WebServerFlow(
client_id=settings.GH_CLIENT_ID,
client_secret=settings.GH_CLIENT_SECRET,
scope='read:user user:email',
auth_uri="https://github.com/login/oauth/authorize",
token_uri="https://github.com/login/oauth/access_token",
redirect_uri=redirect_url,
)
def get_auth_url(request, redirect_url):
flow = get_flow(redirect_url)
request.session['gh_redirect_uri'] = redirect_url
return flow.step1_get_authorize_url()
def get_user_info_after_auth(request):
redirect_uri = request.session['gh_redirect_uri']
del request.session['gh_redirect_uri']
flow = get_flow(redirect_uri)
if 'code' not in request.GET:
return None
code = request.GET['code']
credentials = flow.step2_exchange(code)
http = httplib2.Http(".cache")
http = credentials.authorize(http)
(_, content) = http.request("https://api.github.com/user", "GET")
response = utils.from_json(content.decode('utf-8'))
user_id = response['login']
user_name = response['name']
(_, content) = http.request("https://api.github.com/user/emails", "GET")
response = utils.from_json(content.decode('utf-8'))
user_email = None<|fim▁hole|> if email['verified'] and email['primary']:
user_email = email['email']
break
if not user_email:
raise Exception("email address with GitHub not verified")
return {
'type': 'github',
'user_id': user_id,
'name': '%s (%s)' % (user_id, user_name),
'info': {'email': user_email},
'token': {},
}
def do_logout(user):
return None
def update_status(token, message):
pass
def send_message(user_id, name, user_info, subject, body):
send_mail(
subject,
body,
settings.SERVER_EMAIL,
["%s <%s>" % (user_id, user_info['email'])],
fail_silently=False,
)
def check_constraint(eligibility, user_info):
pass
#
# Election Creation
#
def can_create_election(user_id, user_info):
return True<|fim▁end|> | for email in response: |
<|file_name|>projectownership.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import operator
from django.db import models
from django.db.models import Q
from django.utils import timezone
from sentry.db.models import Model, sane_repr
from sentry.db.models.fields import FlexibleForeignKey, JSONField
from sentry.ownership.grammar import load_schema
from functools import reduce
class ProjectOwnership(Model):
__core__ = True
project = FlexibleForeignKey("sentry.Project", unique=True)
raw = models.TextField(null=True)
schema = JSONField(null=True)
fallthrough = models.BooleanField(default=True)
auto_assignment = models.BooleanField(default=False)
date_created = models.DateTimeField(default=timezone.now)
last_updated = models.DateTimeField(default=timezone.now)
is_active = models.BooleanField(default=True)
# An object to indicate ownership is implicitly everyone
Everyone = object()
class Meta:
app_label = "sentry"
db_table = "sentry_projectownership"
__repr__ = sane_repr("project_id", "is_active")
@classmethod
def get_owners(cls, project_id, data):
"""
        Return the owners for a given project_id and event data blob.
If Everyone is returned, this means we implicitly are
falling through our rules and everyone is responsible.
If an empty list is returned, this means there are explicitly
no owners.
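
        Illustrative outcomes (hypothetical rule "src/*.py" owned by #backend):
          event matches the rule      -> ([backend actor], [matched rule])
          no match, fallthrough=True  -> (cls.Everyone, None)
          no match, fallthrough=False -> ([], None)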
"""
try:
ownership = cls.objects.get(project_id=project_id)
except cls.DoesNotExist:
ownership = cls(project_id=project_id)
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return cls.Everyone if ownership.fallthrough else [], None
owners = {o for rule in rules for o in rule.owners}
return filter(None, resolve_actors(owners, project_id).values()), rules
@classmethod
def get_autoassign_owner(cls, project_id, data):
"""
        Get the auto-assign owner for a project, if any.
        Returns the resolved owner whose rule has the most specific
        (longest) matching pattern, or None.
"""
try:
ownership = cls.objects.get(project_id=project_id)
except cls.DoesNotExist:
return None
if not ownership.auto_assignment:
return None
rules = cls._matching_ownership_rules(ownership, project_id, data)
if not rules:
return None
score = 0
owners = None
# Automatic assignment prefers the owner with the longest
# matching pattern as the match is more specific.
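        # e.g. (hypothetical patterns) if "src/*" and "src/sentry/*" both match,
        # the owners from "src/sentry/*" win because its pattern is longer.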
for rule in rules:
candidate = len(rule.matcher.pattern)
if candidate > score:
score = candidate
owners = rule.owners
actors = filter(None, resolve_actors(owners, project_id).values())
# Can happen if the ownership rule references a user/team that no longer
# is assigned to the project or has been removed from the org.<|fim▁hole|>
@classmethod
def _matching_ownership_rules(cls, ownership, project_id, data):
rules = []
if ownership.schema is not None:
for rule in load_schema(ownership.schema):
if rule.test(data):
rules.append(rule)
return rules
def resolve_actors(owners, project_id):
""" Convert a list of Owner objects into a dictionary
of {Owner: Actor} pairs. Actors not identified are returned
as None. """
from sentry.api.fields.actor import Actor
from sentry.models import User, Team
if not owners:
return {}
users, teams = [], []
owners_lookup = {}
for owner in owners:
            # teams aren't technically case insensitive, but teams also
# aren't allowed to have non-lowercase in slugs, so
# this kinda works itself out correctly since they won't match
owners_lookup[(owner.type, owner.identifier.lower())] = owner
if owner.type == "user":
users.append(owner)
elif owner.type == "team":
teams.append(owner)
actors = {}
if users:
actors.update(
{
("user", email.lower()): Actor(u_id, User)
for u_id, email in User.objects.filter(
reduce(operator.or_, [Q(emails__email__iexact=o.identifier) for o in users]),
# We don't require verified emails
# emails__is_verified=True,
is_active=True,
sentry_orgmember_set__organizationmemberteam__team__projectteam__project_id=project_id,
)
.distinct()
.values_list("id", "emails__email")
}
)
if teams:
actors.update(
{
("team", slug): Actor(t_id, Team)
for t_id, slug in Team.objects.filter(
slug__in=[o.identifier for o in teams], projectteam__project_id=project_id
).values_list("id", "slug")
}
)
return {o: actors.get((o.type, o.identifier.lower())) for o in owners}<|fim▁end|> | if not actors:
return None
return actors[0].resolve() |
<|file_name|>ecc.go<|end_file_name|><|fim▁begin|>package ecc
import (
"bytes"
b64 "encoding/base64"
"encoding/json"
"errors"
"flag"
"fmt"
"io/ioutil"
"log"
"net"
"net/http"
"os"
"reflect"
"time"
"github.com/socketplane/ecc/Godeps/_workspace/src/github.com/hashicorp/consul/api"
"github.com/socketplane/ecc/Godeps/_workspace/src/github.com/hashicorp/consul/command"
"github.com/socketplane/ecc/Godeps/_workspace/src/github.com/hashicorp/consul/watch"
"github.com/socketplane/ecc/Godeps/_workspace/src/github.com/mitchellh/cli"
)
// Embedded Consul Client
// A quick-and-dirty way to embed Consul in any Go-based application without the
// additional step of installing the Consul binary on the host system
// Consul Agent related functions
var started bool
var OfflineSupport bool = true
var listener eccListener
func Start(serverMode bool, bootstrap bool, bindInterface string, dataDir string) error {
bindAddress := ""
if bindInterface != "" {
intf, err := net.InterfaceByName(bindInterface)
if err != nil {
log.Printf("Error : %v", err)
return err
}
addrs, err := intf.Addrs()
if err == nil {
for i := 0; i < len(addrs); i++ {
addr := addrs[i].String()
ip, _, _ := net.ParseCIDR(addr)
if ip != nil && ip.To4() != nil {
bindAddress = ip.To4().String()
}
}
}
}
errCh := make(chan int)
watchForExistingRegisteredUpdates()
go RegisterForNodeUpdates(listener)
go startConsul(serverMode, bootstrap, bindAddress, dataDir, errCh)
select {
case <-errCh:
return errors.New("Error starting Consul Agent")
case <-time.After(time.Second * 5):
}
return nil
}
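// Example usage (a minimal sketch; the interface name and data directory are
// assumptions, not part of this package's contract):
//
//	if err := ecc.Start(true, true, "eth0", "/tmp/ecc-data"); err != nil {
//		log.Fatal(err)
//	}
//	defer ecc.Leave()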
func startConsul(serverMode bool, bootstrap bool, bindAddress string, dataDir string, eCh chan int) {
args := []string{"agent", "-data-dir", dataDir}
if serverMode {
args = append(args, "-server")
}
if bootstrap {
args = append(args, "-bootstrap")
}
if bindAddress != "" {
args = append(args, "-bind")
args = append(args, bindAddress)
args = append(args, "-advertise")
args = append(args, bindAddress)
}
ret := Execute(args...)
eCh <- ret
}
func HasStarted() bool {
return started
}
func Join(address string) error {
ret := Execute("join", address)
if ret != 0 {
log.Println("Error (%d) joining %s with Consul peers", ret, address)
return errors.New("Error adding member")
}
return nil
}
func Leave() error {
stopWatches()
ret := Execute("leave")
if ret != 0 {
log.Println("Error Leaving Consul membership")
return errors.New("Error leaving Consul cluster")
}
return nil
}
// Execute function is borrowed from Consul's main.go
func Execute(args ...string) int {
for _, arg := range args {
if arg == "-v" || arg == "--version" {
newArgs := make([]string, len(args)+1)
newArgs[0] = "version"
copy(newArgs[1:], args)
args = newArgs
break
}
}
cli := &cli.CLI{
Args: args,
Commands: Commands,
HelpFunc: cli.BasicHelpFunc("consul"),
}
exitCode, err := cli.Run()
if err != nil {
fmt.Fprintf(os.Stderr, "Error executing CLI: %s\n", err.Error())
return 1
}
return exitCode
}
const CONSUL_BASE_URL = "http://localhost:8500/v1/"
func ConsulGet(url string) (string, bool) {
resp, err := http.Get(url)
if err != nil {
log.Printf("Error (%v) in Get for %s\n", err, url)
return "", false
}
defer resp.Body.Close()
log.Printf("Status of Get %s %d for %s", resp.Status, resp.StatusCode, url)
if resp.StatusCode >= 200 && resp.StatusCode < 300 {
var jsonBody []consulBody
body, err := ioutil.ReadAll(resp.Body)
err = json.Unmarshal(body, &jsonBody)
existingValue, err := b64.StdEncoding.DecodeString(jsonBody[0].Value)
if err != nil {
return "", false
}
return string(existingValue[:]), true
} else {
return "", false
}
}
// Consul KV Store related
const CONSUL_KV_BASE_URL = "http://localhost:8500/v1/kv/"
type consulBody struct {
CreateIndex int `json:"CreateIndex,omitempty"`
ModifyIndex int `json:"ModifyIndex,omitempty"`
Key string `json:"Key,omitempty"`
Flags int `json:"Flags,omitempty"`
Value string `json:"Value,omitempty"`
}
func GetAll(store string) ([][]byte, []int, bool) {
if OfflineSupport && !started {
return getAllFromCache(store)
}
url := CONSUL_KV_BASE_URL + store + "?recurse"
resp, err := http.Get(url)
if err != nil {
log.Printf("Error (%v) in Get for %s\n", err, url)
return nil, nil, false
}
defer resp.Body.Close()
log.Printf("Status of Get %s %d for %s", resp.Status, resp.StatusCode, url)
if resp.StatusCode >= 400 && resp.StatusCode < 500 {
return nil, nil, false
} else if resp.StatusCode >= 200 && resp.StatusCode < 300 {
var jsonBody []consulBody
valueArr := make([][]byte, 0)
indexArr := make([]int, 0)
body, _ := ioutil.ReadAll(resp.Body)
err = json.Unmarshal(body, &jsonBody)
for _, body := range jsonBody {
existingValue, _ := b64.StdEncoding.DecodeString(body.Value)
valueArr = append(valueArr, existingValue)
indexArr = append(indexArr, body.ModifyIndex)
}
return valueArr, indexArr, true
} else {
return nil, nil, false
}
}
func Get(store string, key string) ([]byte, int, bool) {
if OfflineSupport && !started {
return getFromCache(store, key)
}
url := CONSUL_KV_BASE_URL + store + "/" + key
resp, err := http.Get(url)
if err != nil {
log.Printf("Error (%v) in Get for %s\n", err, url)
return nil, 0, false
}
defer resp.Body.Close()
log.Printf("Status of Get %s %d for %s", resp.Status, resp.StatusCode, url)
if resp.StatusCode >= 400 && resp.StatusCode < 500 {
return nil, 0, false
} else if resp.StatusCode >= 200 && resp.StatusCode < 300 {
var jsonBody []consulBody
body, err := ioutil.ReadAll(resp.Body)
err = json.Unmarshal(body, &jsonBody)
existingValue, err := b64.StdEncoding.DecodeString(jsonBody[0].Value)
if err != nil {
return nil, jsonBody[0].ModifyIndex, false
}
return existingValue, jsonBody[0].ModifyIndex, true
} else {
return nil, 0, false
}
}
const (
OK = iota
OUTDATED
ERROR
)
type eccerror int
func Put(store string, key string, value []byte, oldValue []byte) eccerror {
if OfflineSupport && !started {
return putInCache(store, key, value, oldValue)
}
existingValue, casIndex, ok := Get(store, key)
if ok && !bytes.Equal(oldValue, existingValue) {
return OUTDATED
}
url := fmt.Sprintf("%s%s/%s?cas=%d", CONSUL_KV_BASE_URL, store, key, casIndex)
log.Printf("Updating KV pair for %s %s %s %d", url, key, value, casIndex)
req, err := http.NewRequest("PUT", url, bytes.NewBuffer(value))
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
log.Println("Error creating KV pair for ", url, err)
return ERROR
}
defer resp.Body.Close()
body, _ := ioutil.ReadAll(resp.Body)
if string(body) == "false" {
// Let the application retry based on return value
// return Put(store, key, value, oldValue)
return OUTDATED
}
return OK
}
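// putWithRetry is an illustrative sketch (not part of the original API): it
// retries the compare-and-swap Put above on OUTDATED, re-reading the current
// value before each attempt so every retry compares against the latest state.
func putWithRetry(store string, key string, value []byte, attempts int) eccerror {
	for i := 0; i < attempts; i++ {
		oldValue, _, _ := Get(store, key)
		if res := Put(store, key, value, oldValue); res != OUTDATED {
			return res
		}
	}
	return OUTDATED
}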
func Delete(store string, key string) eccerror {
if OfflineSupport && !started {
return deleteFromCache(store, key)
}
url := fmt.Sprintf("%s%s/%s", CONSUL_KV_BASE_URL, store, key)
log.Printf("Deleting KV pair for %s", url)
req, err := http.NewRequest("DELETE", url, nil)
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
log.Println("Error creating KV pair for ", url, err)
return ERROR
}
defer resp.Body.Close()
body, _ := ioutil.ReadAll(resp.Body)
log.Println(string(body))
return OK
}
type Store struct {
cache map[string][]byte
}
// Local KV store cache for bootstrap node consul connection issues
var cache map[string]Store = make(map[string]Store)
func getAllFromCache(storeName string) ([][]byte, []int, bool) {
store, ok := cache[storeName]
if !ok {
return nil, nil, false
}
vals := make([][]byte, 0)
for _, val := range store.cache {
vals = append(vals, val)
}
return vals, nil, true
}
func getFromCache(storeName string, key string) ([]byte, int, bool) {
store, ok := cache[storeName]
if !ok {
return nil, 0, false
}
val, ok := store.cache[key]
return val, 0, ok
}
func putInCache(storeName string, key string, value []byte, oldValue []byte) eccerror {
store, ok := cache[storeName]
if !ok {
store = Store{make(map[string][]byte)}
cache[storeName] = store
}
store.cache[key] = value
return OK
}
func deleteFromCache(storeName string, key string) eccerror {
store, ok := cache[storeName]
if ok {
delete(store.cache, key)
}
return OK
}
func populateKVStoreFromCache() {
if !OfflineSupport || started {
return
}
started = true
for storeName, store := range cache {
for key, val := range store.cache {
go Put(storeName, key, val, nil)
}
delete(cache, storeName)
}
}
// Watch related
const (
NOTIFY_UPDATE_ADD = iota
NOTIFY_UPDATE_MODIFY
NOTIFY_UPDATE_DELETE
)
type NotifyUpdateType int
const (
WATCH_TYPE_NODE = iota
WATCH_TYPE_KEY
WATCH_TYPE_STORE
WATCH_TYPE_EVENT
)
type WatchType int
type watchData struct {
listeners map[string][]Listener
watchPlans []*watch.WatchPlan
}
var watches map[WatchType]watchData = make(map[WatchType]watchData)
type Listener interface {
NotifyNodeUpdate(NotifyUpdateType, string)
NotifyKeyUpdate(NotifyUpdateType, string, []byte)
NotifyStoreUpdate(NotifyUpdateType, string, map[string][]byte)
}
type eccListener struct {
}
func (e eccListener) NotifyNodeUpdate(nType NotifyUpdateType, nodeAddress string) {
if nType == NOTIFY_UPDATE_ADD && !started {
populateKVStoreFromCache()
}
}
func (e eccListener) NotifyKeyUpdate(nType NotifyUpdateType, key string, data []byte) {
}
func (e eccListener) NotifyStoreUpdate(nType NotifyUpdateType, store string, data map[string][]byte) {
}
func contains(wtype WatchType, key string, elem interface{}) bool {
ws, ok := watches[wtype]
if !ok {
return false
}
list, ok := ws.listeners[key]
if !ok {
return false
}
v := reflect.ValueOf(list)
for i := 0; i < v.Len(); i++ {
if v.Index(i).Interface() == elem {
return true
}
}
return false
}
type watchconsul bool
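// addListener records a listener for the given watch type and key, and the
// returned watchconsul flag reports whether this was the first listener for
// that key, i.e. whether a Consul watch still needs to be registered.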
func addListener(wtype WatchType, key string, listener Listener) watchconsul {
var wc watchconsul = false
	if !contains(wtype, key, listener) {
ws, ok := watches[wtype]
if !ok {
watches[wtype] = watchData{make(map[string][]Listener), make([]*watch.WatchPlan, 0)}
ws = watches[wtype]
}
listeners, ok := ws.listeners[key]
if !ok {
listeners = make([]Listener, 0)
wc = true
}
ws.listeners[key] = append(listeners, listener)
}
return wc
}
func getListeners(wtype WatchType, key string) []Listener {
ws, ok := watches[wtype]
if !ok {
return nil
}
list, ok := ws.listeners[key]
if ok {
return list
}
return nil
}
func addWatchPlan(wtype WatchType, wp *watch.WatchPlan) {
ws, ok := watches[wtype]
if !ok {
return
}
ws.watchPlans = append(ws.watchPlans, wp)
watches[wtype] = ws
}
func stopWatches() {
for _, ws := range watches {
for _, wp := range ws.watchPlans {
wp.Stop()
}
ws.watchPlans = ws.watchPlans[:0]
}
}
func register(wtype WatchType, params map[string]interface{}, handler watch.HandlerFunc) {
// Create the watch
wp, err := watch.Parse(params)
if err != nil {
fmt.Printf("Register error : %s", err)
return
}
addWatchPlan(wtype, wp)
wp.Handler = handler
cmdFlags := flag.NewFlagSet("watch", flag.ContinueOnError)
httpAddr := command.HTTPAddrFlag(cmdFlags)
// Run the watch
if err := wp.Run(*httpAddr); err != nil {
fmt.Printf("Error querying Consul agent: %s", err)
}
}
var nodeCache []*api.Node
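// compare returns the elements of X whose Address does not appear in Y,
// i.e. the set difference X \ Y keyed by node address.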
func compare(X, Y []*api.Node) []*api.Node {
m := make(map[string]bool)
for _, y := range Y {
m[y.Address] = true
}
var ret []*api.Node
for _, x := range X {
if m[x.Address] {
continue
}
ret = append(ret, x)
}
return ret
}
func updateNodeListeners(clusterNodes []*api.Node) {
toDelete := compare(nodeCache, clusterNodes)
toAdd := compare(clusterNodes, nodeCache)
nodeCache = clusterNodes
listeners := getListeners(WATCH_TYPE_NODE, "")
if listeners == nil {
return
}
for _, deleteNode := range toDelete {
for _, listener := range listeners {
listener.NotifyNodeUpdate(NOTIFY_UPDATE_DELETE, deleteNode.Address)
}
}
for _, addNode := range toAdd {
for _, listener := range listeners {
listener.NotifyNodeUpdate(NOTIFY_UPDATE_ADD, addNode.Address)<|fim▁hole|>func updateKeyListeners(idx uint64, key string, data interface{}) {
listeners := getListeners(WATCH_TYPE_KEY, key)
if listeners == nil {
return
}
var kv *api.KVPair = nil
var val []byte = nil
updateType := NOTIFY_UPDATE_MODIFY
if data != nil {
kv = data.(*api.KVPair)
}
if kv == nil {
updateType = NOTIFY_UPDATE_DELETE
} else {
updateType = NOTIFY_UPDATE_MODIFY
if idx == kv.CreateIndex {
updateType = NOTIFY_UPDATE_ADD
}
val = kv.Value
}
for _, listener := range listeners {
listener.NotifyKeyUpdate(NotifyUpdateType(updateType), key, val)
}
}
func registerForNodeUpdates() {
// Compile the watch parameters
params := make(map[string]interface{})
params["type"] = "nodes"
handler := func(idx uint64, data interface{}) {
updateNodeListeners(data.([]*api.Node))
}
register(WATCH_TYPE_NODE, params, handler)
}
func RegisterForNodeUpdates(listener Listener) {
wc := addListener(WATCH_TYPE_NODE, "", listener)
if wc {
registerForNodeUpdates()
}
}
func registerForKeyUpdates(absKey string) {
params := make(map[string]interface{})
params["type"] = "key"
params["key"] = absKey
handler := func(idx uint64, data interface{}) {
updateKeyListeners(idx, absKey, data)
}
register(WATCH_TYPE_KEY, params, handler)
}
func RegisterForKeyUpdates(store string, key string, listener Listener) {
absKey := store + "/" + key
wc := addListener(WATCH_TYPE_KEY, absKey, listener)
if wc {
registerForKeyUpdates(absKey)
}
}
func registerForStoreUpdates(store string) {
// Compile the watch parameters
params := make(map[string]interface{})
params["type"] = "keyprefix"
params["prefix"] = store + "/"
handler := func(idx uint64, data interface{}) {
fmt.Println("NOT IMPLEMENTED Store Update :", idx, data)
}
register(WATCH_TYPE_STORE, params, handler)
}
func RegisterForStoreUpdates(store string, listener Listener) {
wc := addListener(WATCH_TYPE_STORE, store, listener)
if wc {
registerForStoreUpdates(store)
}
}
func watchForExistingRegisteredUpdates() {
for wType, ws := range watches {
log.Println("watchForExistingRegisteredUpdates : ", wType)
		for key := range ws.listeners {
log.Println("key : ", key)
switch wType {
case WATCH_TYPE_NODE:
go registerForNodeUpdates()
case WATCH_TYPE_KEY:
go registerForKeyUpdates(key)
case WATCH_TYPE_STORE:
go registerForStoreUpdates(key)
}
}
}
}<|fim▁end|> | }
}
}
|
<|file_name|>http_module.ngfactory.ts<|end_file_name|><|fim▁begin|>/**
* @fileoverview This file is generated by the Angular 2 template compiler.
* Do not edit.
* @suppress {suspiciousCode,uselessCode,missingProperties}
*/
/* tslint:disable */
import * as import0 from '@angular/core/src/linker/ng_module_factory';
import * as import1 from '@angular/http/src/http_module';
import * as import2 from '@angular/http/src/backends/browser_xhr';
import * as import3 from '@angular/http/src/base_response_options';
import * as import4 from '@angular/http/src/backends/xhr_backend';
import * as import5 from '@angular/http/src/base_request_options';
import * as import6 from '@angular/core/src/di/injector';
import * as import7 from '@angular/http/src/interfaces';
import * as import8 from '@angular/http/src/http';
import * as import9 from '@angular/http/src/backends/browser_jsonp';
import * as import10 from '@angular/http/src/backends/jsonp_backend';
class HttpModuleInjector extends import0.NgModuleInjector<import1.HttpModule> {
_HttpModule_0:import1.HttpModule;
__BrowserXhr_1:import2.BrowserXhr;
__ResponseOptions_2:import3.BaseResponseOptions;
__XSRFStrategy_3:any;
__XHRBackend_4:import4.XHRBackend;
__RequestOptions_5:import5.BaseRequestOptions;
__Http_6:any;
constructor(parent:import6.Injector) {
super(parent,([] as any[]),([] as any[]));
}
get _BrowserXhr_1():import2.BrowserXhr {
if ((this.__BrowserXhr_1 == null)) { (this.__BrowserXhr_1 = new import2.BrowserXhr()); }
return this.__BrowserXhr_1;
}
get _ResponseOptions_2():import3.BaseResponseOptions {
if ((this.__ResponseOptions_2 == null)) { (this.__ResponseOptions_2 = new import3.BaseResponseOptions()); }
return this.__ResponseOptions_2;
}
get _XSRFStrategy_3():any {
if ((this.__XSRFStrategy_3 == null)) { (this.__XSRFStrategy_3 = import1._createDefaultCookieXSRFStrategy()); }
return this.__XSRFStrategy_3;
}
get _XHRBackend_4():import4.XHRBackend {
if ((this.__XHRBackend_4 == null)) { (this.__XHRBackend_4 = new import4.XHRBackend(this._BrowserXhr_1,this._ResponseOptions_2,this._XSRFStrategy_3)); }
return this.__XHRBackend_4;
}
get _RequestOptions_5():import5.BaseRequestOptions {
if ((this.__RequestOptions_5 == null)) { (this.__RequestOptions_5 = new import5.BaseRequestOptions()); }
return this.__RequestOptions_5;
}
get _Http_6():any {
if ((this.__Http_6 == null)) { (this.__Http_6 = import1.httpFactory(this._XHRBackend_4,this._RequestOptions_5)); }
return this.__Http_6;
}
createInternal():import1.HttpModule {
this._HttpModule_0 = new import1.HttpModule();
return this._HttpModule_0;
}
getInternal(token:any,notFoundResult:any):any {
if ((token === import1.HttpModule)) { return this._HttpModule_0; }
if ((token === import2.BrowserXhr)) { return this._BrowserXhr_1; }
if ((token === import3.ResponseOptions)) { return this._ResponseOptions_2; }
if ((token === import7.XSRFStrategy)) { return this._XSRFStrategy_3; }
if ((token === import4.XHRBackend)) { return this._XHRBackend_4; }
if ((token === import5.RequestOptions)) { return this._RequestOptions_5; }
if ((token === import8.Http)) { return this._Http_6; }
return notFoundResult;
}
destroyInternal():void {
}
}
export const HttpModuleNgFactory:import0.NgModuleFactory<import1.HttpModule> = new import0.NgModuleFactory(HttpModuleInjector,import1.HttpModule);
class JsonpModuleInjector extends import0.NgModuleInjector<import1.JsonpModule> {
_JsonpModule_0:import1.JsonpModule;
__BrowserJsonp_1:import9.BrowserJsonp;
__ResponseOptions_2:import3.BaseResponseOptions;
__JSONPBackend_3:import10.JSONPBackend_;
__RequestOptions_4:import5.BaseRequestOptions;
__Jsonp_5:any;
constructor(parent:import6.Injector) {
super(parent,([] as any[]),([] as any[]));
}
get _BrowserJsonp_1():import9.BrowserJsonp {
if ((this.__BrowserJsonp_1 == null)) { (this.__BrowserJsonp_1 = new import9.BrowserJsonp()); }
return this.__BrowserJsonp_1;
}
get _ResponseOptions_2():import3.BaseResponseOptions {
if ((this.__ResponseOptions_2 == null)) { (this.__ResponseOptions_2 = new import3.BaseResponseOptions()); }
return this.__ResponseOptions_2;
}
get _JSONPBackend_3():import10.JSONPBackend_ {
if ((this.__JSONPBackend_3 == null)) { (this.__JSONPBackend_3 = new import10.JSONPBackend_(this._BrowserJsonp_1,this._ResponseOptions_2)); }
return this.__JSONPBackend_3;
}
get _RequestOptions_4():import5.BaseRequestOptions {
if ((this.__RequestOptions_4 == null)) { (this.__RequestOptions_4 = new import5.BaseRequestOptions()); }
return this.__RequestOptions_4;
}<|fim▁hole|> return this.__Jsonp_5;
}
createInternal():import1.JsonpModule {
this._JsonpModule_0 = new import1.JsonpModule();
return this._JsonpModule_0;
}
getInternal(token:any,notFoundResult:any):any {
if ((token === import1.JsonpModule)) { return this._JsonpModule_0; }
if ((token === import9.BrowserJsonp)) { return this._BrowserJsonp_1; }
if ((token === import3.ResponseOptions)) { return this._ResponseOptions_2; }
if ((token === import10.JSONPBackend)) { return this._JSONPBackend_3; }
if ((token === import5.RequestOptions)) { return this._RequestOptions_4; }
if ((token === import8.Jsonp)) { return this._Jsonp_5; }
return notFoundResult;
}
destroyInternal():void {
}
}
export const JsonpModuleNgFactory:import0.NgModuleFactory<import1.JsonpModule> = new import0.NgModuleFactory(JsonpModuleInjector,import1.JsonpModule);<|fim▁end|> | get _Jsonp_5():any {
if ((this.__Jsonp_5 == null)) { (this.__Jsonp_5 = import1.jsonpFactory(this._JSONPBackend_3,this._RequestOptions_4)); } |
<|file_name|>file.py<|end_file_name|><|fim▁begin|>from utils import textAppend, textPrepend, textCut, textEditLastChar, error, textCursorPos
class File:
""" Represents a file (A separated class allow to open several files at a time.
The class also holds the whole file content. (The vim buffers only store either the
accepted chunks, or the editing statement) """
def __init__(self, plugin, buffers):
self.windowsManager = plugin.windowsManager
self.coqManager = plugin.coqManager
self.input = buffers[0]
self.output = buffers[1]
# Each chunk is describe by the following tuple : (startPos, endPos, newLine), where startPos and endPos are coords tuple
self.chunks = []
# The whole file content
self.code = []
self.editPosition = (0, 0)
# We manage a virtual new-line at the end of the compiled buffer.
self.initOutputCursor()
def initOutputCursor(self):
""" Init the newline-cursor in the Compiled buffer. """
self.output.options['modifiable'] = True
del self.output[:]
self.drawNewlineCursor(False)
self.output.options['modifiable'] = False
self.editNewLine = False
# We backtrack every chunks
self.chunks = self.chunks[:- self.coqManager.rewind(len(self.chunks))]
def drawNewlineCursor(self, newLine):
if newLine:
self.windowsManager.commands('__Compiled__', ["normal G$a Dt"])
else:
self.windowsManager.commands('__Compiled__', ["normal G$a PR"])
def next(self):
nextChunk = self.windowsManager.input.getChunk(self.input, (0, 0))
if nextChunk :
if self.coqManager.sendChunk(nextChunk[0]):
if self.editNewLine:
chunkStart = (0, textCursorPos(self.output)[1] + 1, 2)
else:
                chunkStart = textCursorPos(self.output, diffX = 3) # diffX=3 to skip past the newline-cursor
chunkStart = (chunkStart[0], chunkStart[1], 0)
chunk = textCut(self.input, (0, 0, 2), nextChunk[1])
self.output.options['modifiable'] = True
# Remove the last newline-cursor
self.windowsManager.commands('__Compiled__', ["normal G$a"])
textAppend(self.output, chunk, self.editNewLine)
self.editNewLine = nextChunk[2]
chunkEnd = textCursorPos(self.output)
if self.editNewLine:
self.drawNewlineCursor(True)
chunkEnd = (chunkEnd[0] + 1, chunkEnd[1], 1)
else:
self.drawNewlineCursor(False)
chunkEnd = (chunkEnd[0] + 1, chunkEnd[1], 0)
self.output.options['modifiable'] = False
self.chunks.append((chunkStart, chunkEnd, self.editNewLine))<|fim▁hole|>
def prev(self):
""" Backtrack of one chunk """
if len(self.chunks) <= 0:
print("No chunk to backtrack !")
return None
actualRewind = self.coqManager.rewind(1)
if actualRewind == 1:
self.output.options['modifiable'] = True
# Remove the last newline-cursor
self.windowsManager.commands('__Compiled__', ["normal G$a"])
lastChunk = self.chunks[-1]
chunk = textCut(self.output, lastChunk[0], lastChunk[1])
textPrepend(self.input, chunk, lastChunk[2])
self.chunks.pop()
if len(self.chunks) == 0:
self.editNewLine = False
else:
self.editNewLine = self.chunks[-1][2]
self.drawNewlineCursor(self.editNewLine)
self.output.options['modifiable'] = False
def write(self, filename):
try:
file = open(filename, 'w')
# We write the compiled buffer, and then the edit buffer
for i in xrange(len(self.output) - 1):
file.write(self.output[i] + "\n")
interline = self.output[-1][:-4] # We don't take the newline-cursor
if not self.editNewLine:
interline += self.input[0]
file.write(interline + "\n")
for i in xrange(0 if self.editNewLine else 1, len(self.input)):
file.write(self.input[i] + "\n")
file.close()
except IOError as e:
error(str(e))
def open(self, filename):
# First, clear the buffers
self.initOutputCursor()
del self.chunks[:]
del self.input[:]
try:
file = open(filename, 'r')
# We simply add every lines in the Edit buffer
firstLine = True
for line in file:
if firstLine: # We don't want to skip the first line
self.input[0] = line
firstLine = False
else: self.input.append(line)
file.close()
except IOError as e:
error(str(e))<|fim▁end|> | |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
mod callback;
mod executor;
mod lock;
mod promise;
use std::fmt::{self, Debug, Formatter};
use std::sync::Arc;
use futures::task::{self, Task};
use futures::{Async, Future, Poll};
use self::callback::{Abort, Request as RequestCallback, UnaryRequest as UnaryRequestCallback};
use self::executor::SpawnNotify;
use self::promise::{Batch as BatchPromise, Shutdown as ShutdownPromise};
use crate::call::server::RequestContext;
use crate::call::{BatchContext, Call, MessageReader};
use crate::cq::CompletionQueue;
use crate::error::{Error, Result};
use crate::server::RequestCallContext;
pub(crate) use self::executor::{Executor, Kicker};
pub use self::lock::SpinLock;
pub use self::promise::BatchType;
/// A handle that is used to notify future that the task finishes.
pub struct NotifyHandle<T> {
result: Option<Result<T>>,
task: Option<Task>,
stale: bool,
}
impl<T> NotifyHandle<T> {
fn new() -> NotifyHandle<T> {
NotifyHandle {
result: None,
task: None,
stale: false,
}
}
/// Set the result and notify future if necessary.
fn set_result(&mut self, res: Result<T>) -> Option<Task> {
self.result = Some(res);
self.task.take()
}
}
type Inner<T> = SpinLock<NotifyHandle<T>>;
fn new_inner<T>() -> Arc<Inner<T>> {
Arc::new(SpinLock::new(NotifyHandle::new()))
}
/// Get the future status without the need to poll.
///
/// Returns `Ok(())` while the task is still pending; once the task has
/// finished, the returned error describes how the RPC terminated.
/// Not implemented as a method as it's only for internal usage.
pub fn check_alive<T>(f: &CqFuture<T>) -> Result<()> {
let guard = f.inner.lock();
match guard.result {
None => Ok(()),
Some(Err(Error::RpcFailure(ref status))) => {
Err(Error::RpcFinished(Some(status.to_owned())))
}
Some(Ok(_)) | Some(Err(_)) => Err(Error::RpcFinished(None)),
}
}
/// A future object for task that is scheduled to `CompletionQueue`.
pub struct CqFuture<T> {
inner: Arc<Inner<T>>,
}
impl<T> CqFuture<T> {
fn new(inner: Arc<Inner<T>>) -> CqFuture<T> {
CqFuture { inner }
}
}
impl<T> Future for CqFuture<T> {
type Item = T;
type Error = Error;
fn poll(&mut self) -> Poll<T, Error> {
let mut guard = self.inner.lock();<|fim▁hole|>
if let Some(res) = guard.result.take() {
guard.stale = true;
return Ok(Async::Ready(res?));
}
// So the task has not been finished yet, add notification hook.
if guard.task.is_none() || !guard.task.as_ref().unwrap().will_notify_current() {
guard.task = Some(task::current());
}
Ok(Async::NotReady)
}
}
/// Future object for batch jobs.
pub type BatchFuture = CqFuture<Option<MessageReader>>;
/// A result holder for asynchronous execution.
// This enum is going to be passed to FFI, so don't use trait or generic here.
pub enum CallTag {
Batch(BatchPromise),
Request(RequestCallback),
UnaryRequest(UnaryRequestCallback),
Abort(Abort),
Shutdown(ShutdownPromise),
Spawn(SpawnNotify),
}
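// Illustrative flow (a sketch of how the pieces above fit together, not new
// API): a caller creates a paired future/tag, hands the tag to gRPC core, and
// waits on the future; the event loop later resolves the tag.
//
//     let (future, tag) = CallTag::shutdown_pair();
//     // ... submit `tag` to the completion queue; the event loop then calls
//     // tag.resolve(&cq, /* success = */ true);
//     let _ = future.wait(); // completes once the tag has been resolved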
impl CallTag {
/// Generate a Future/CallTag pair for batch jobs.
pub fn batch_pair(ty: BatchType) -> (BatchFuture, CallTag) {
let inner = new_inner();
let batch = BatchPromise::new(ty, inner.clone());
(CqFuture::new(inner), CallTag::Batch(batch))
}
/// Generate a CallTag for request job. We don't have an eventloop
/// to pull the future, so just the tag is enough.
pub fn request(ctx: RequestCallContext) -> CallTag {
CallTag::Request(RequestCallback::new(ctx))
}
/// Generate a Future/CallTag pair for shutdown call.
pub fn shutdown_pair() -> (CqFuture<()>, CallTag) {
let inner = new_inner();
let shutdown = ShutdownPromise::new(inner.clone());
(CqFuture::new(inner), CallTag::Shutdown(shutdown))
}
/// Generate a CallTag for abort call before handler is called.
pub fn abort(call: Call) -> CallTag {
CallTag::Abort(Abort::new(call))
}
/// Generate a CallTag for unary request job.
pub fn unary_request(ctx: RequestContext, rc: RequestCallContext) -> CallTag {
let cb = UnaryRequestCallback::new(ctx, rc);
CallTag::UnaryRequest(cb)
}
/// Get the batch context from result holder.
pub fn batch_ctx(&self) -> Option<&BatchContext> {
match *self {
CallTag::Batch(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.batch_ctx()),
CallTag::Abort(ref cb) => Some(cb.batch_ctx()),
_ => None,
}
}
/// Get the request context from the result holder.
pub fn request_ctx(&self) -> Option<&RequestContext> {
match *self {
CallTag::Request(ref prom) => Some(prom.context()),
CallTag::UnaryRequest(ref cb) => Some(cb.request_ctx()),
_ => None,
}
}
/// Resolve the CallTag with given status.
pub fn resolve(self, cq: &CompletionQueue, success: bool) {
match self {
CallTag::Batch(prom) => prom.resolve(success),
CallTag::Request(cb) => cb.resolve(cq, success),
CallTag::UnaryRequest(cb) => cb.resolve(cq, success),
CallTag::Abort(_) => {}
CallTag::Shutdown(prom) => prom.resolve(success),
CallTag::Spawn(notify) => notify.resolve(success),
}
}
}
impl Debug for CallTag {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
match *self {
CallTag::Batch(ref ctx) => write!(f, "CallTag::Batch({:?})", ctx),
CallTag::Request(_) => write!(f, "CallTag::Request(..)"),
CallTag::UnaryRequest(_) => write!(f, "CallTag::UnaryRequest(..)"),
CallTag::Abort(_) => write!(f, "CallTag::Abort(..)"),
CallTag::Shutdown(_) => write!(f, "CallTag::Shutdown"),
CallTag::Spawn(_) => write!(f, "CallTag::Spawn"),
}
}
}
#[cfg(test)]
mod tests {
use std::sync::mpsc::*;
use std::sync::*;
use std::thread;
use super::*;
use crate::env::Environment;
#[test]
fn test_resolve() {
let env = Environment::new(1);
let (cq_f1, tag1) = CallTag::shutdown_pair();
let (cq_f2, tag2) = CallTag::shutdown_pair();
let (tx, rx) = mpsc::channel();
let handler = thread::spawn(move || {
tx.send(cq_f1.wait()).unwrap();
tx.send(cq_f2.wait()).unwrap();
});
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag1.resolve(&env.pick_cq(), true);
assert!(rx.recv().unwrap().is_ok());
assert_eq!(rx.try_recv().unwrap_err(), TryRecvError::Empty);
tag2.resolve(&env.pick_cq(), false);
match rx.recv() {
Ok(Err(Error::ShutdownFailed)) => {}
res => panic!("expect shutdown failed, but got {:?}", res),
}
handler.join().unwrap();
}
}<|fim▁end|> | if guard.stale {
panic!("Resolved future is not supposed to be polled again.");
} |
<|file_name|>et.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2019, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang( 'print', 'et', {<|fim▁hole|> toolbar: 'Printimine'
} );<|fim▁end|> | |
<|file_name|>test_ArchitecturalPattern.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import os
import json
from subprocess import call
import cairis.core.BorgFactory
from cairis.core.Borg import Borg
from cairis.core.RoleParameters import RoleParameters
from cairis.core.TemplateAssetParameters import TemplateAssetParameters
from cairis.core.TemplateGoalParameters import TemplateGoalParameters
from cairis.core.ValueTypeParameters import ValueTypeParameters
from cairis.core.ComponentParameters import ComponentParameters
from cairis.core.ConnectorParameters import ConnectorParameters
from cairis.core.ComponentViewParameters import ComponentViewParameters
__author__ = 'Shamal Faily'
class ComponentViewTest(unittest.TestCase):<|fim▁hole|>
def setUp(self):
call([os.environ['CAIRIS_CFG_DIR'] + "/initdb.sh"])
cairis.core.BorgFactory.initialise()
f = open(os.environ['CAIRIS_SRC'] + '/test/componentviews.json')
d = json.load(f)
f.close()
self.theRequirements = []
self.theRoles = []
self.iRoles = d['roles']
for i in self.iRoles:
self.theRoles.append(RoleParameters(i["theName"], i["theType"], i["theShortCode"], i["theDescription"],[]))
self.theMetricTypes = []
self.iAccessRights = d['access_rights']
for i in self.iAccessRights:
self.theMetricTypes.append(ValueTypeParameters(i["theName"], i["theDescription"], 'access_right','',i["theValue"],i["theRationale"]))
self.iSurfaceTypes = d['surface_types']
for i in self.iSurfaceTypes:
self.theMetricTypes.append(ValueTypeParameters(i["theName"], i["theDescription"], 'surface_type','',i["theValue"],i["theRationale"]))
self.iProtocols = d['protocols']
for i in self.iProtocols:
self.theMetricTypes.append(ValueTypeParameters(i["theName"], i["theDescription"], 'protocol','',i["theValue"],i["theRationale"]))
self.iPrivileges = d['privileges']
for i in self.iPrivileges:
self.theMetricTypes.append(ValueTypeParameters(i["theName"], i["theDescription"], 'privilege','',i["theValue"],i["theRationale"]))
self.theAssets = []
spValues = [0,0,0,0,0,0,0,0,]
srValues = ['None','None','None','None','None','None','None','None']
self.iTemplateAssets = d['template_assets']
for i in self.iTemplateAssets:
self.theAssets.append(TemplateAssetParameters(i["theName"], i["theShortCode"], i["theDescription"], i["theSignificance"],i["theType"],i["theSurfaceType"],i["theAccessRight"],spValues,srValues,[],[]))
self.theGoals = []
self.iTemplateGoals = d['template_goals']
for i in self.iTemplateGoals:
self.theGoals.append(TemplateGoalParameters(i["theName"],i["theDefinition"],i["theRationale"],i["theConcerns"],i["theResponsibilities"]))
self.iComponentViews = d['architectural_patterns']
def testComponentView(self):
cvName = self.iComponentViews[0]["theName"]
cvSyn = self.iComponentViews[0]["theSynopsis"]
theComponents = []
for c in self.iComponentViews[0]["theComponents"]:
cName = c["theName"]
cDesc = c["theDescription"]
cInts = []
for i in c["theInterfaces"]:
cInts.append((i["theName"],i["theType"],i["theAccessRight"],i["thePrivilege"]))
cStructs = []
for cs in c["theStructure"]:
cStructs.append((cs["theHeadAsset"],cs["theHeadAdornment"],cs["theHeadNav"],cs["theHeadNry"],cs["theHeadRole"],cs["theTailRole"],cs["theTailNry"],cs["theTailNav"],cs["theTailAdornment"],cs["theTailAsset"]))
cReqs = []
cGoals = []
for i in c["theGoals"]:
cGoals.append(i)
cGoalAssocs = []
for cga in c["theGoalAssociations"]:
cGoalAssocs.append((cga["theGoalName"],cga["theSubGoalName"],cga["theRefType"],'None'))
theComponents.append(ComponentParameters(cName,cDesc,cInts,cStructs,cReqs,cGoals,cGoalAssocs))
theConnectors = []
for conn in self.iComponentViews[0]["theConnectors"]:
theConnectors.append(ConnectorParameters(conn["theConnectorName"],cvName,conn["theFromComponent"],conn["theFromRole"],conn["theFromInterface"],conn["theToComponent"],conn["theToInterface"],conn["theToRole"],conn["theAssetName"],conn["theProtocol"],conn["theAccessRight"]))
icvp = ComponentViewParameters(cvName,cvSyn,self.theMetricTypes,self.theRoles,self.theAssets,self.theRequirements,self.theGoals,theComponents,theConnectors)
b = Borg()
b.dbProxy.addComponentView(icvp)
ocvps = b.dbProxy.getComponentViews()
ocvp = ocvps[cvName]
self.assertEqual(icvp.name(), ocvp.name())
self.assertEqual(icvp.synopsis(), ocvp.synopsis())
self.assertEqual(b.dbProxy.componentAttackSurface('Policy Manager'),3.0)
cg = b.dbProxy.componentGoalModel('Policy Manager')
icvp.setId(ocvp.id())
icvp.theSynopsis = 'revised synopsis'
b.dbProxy.updateComponentView(icvp)
ocvps = b.dbProxy.getComponentViews()
ocvp = ocvps[cvName]
self.assertEqual(icvp.name(), ocvp.name())
self.assertEqual(ocvp.synopsis(), 'revised synopsis')
b.dbProxy.deleteComponentView(ocvp.id())
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>differenceObject.ts<|end_file_name|><|fim▁begin|>/**
 * Deep diff between two objects, using lodash
* @param _object Object compared<|fim▁hole|>import { transform } from './transform';
export function differenceObject(object: any, base: any): any {
return transform(object, function(result, value, key) {
if (!isEqual(value, base[key])) {
result[key] = isObject(value) && isObject(base[key]) ? differenceObject(value, base[key]) : value;
}
});
}<|fim▁end|> | * @param base Object to compare with
*/
import { isObject } from '../_common/isObject';
import { isEqual } from '../_common/isEqual'; |
<|file_name|>dependency_format.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Resolution of mixing rlibs and dylibs
//!
//! When producing a final artifact, such as a dynamic library, the compiler has
//! a choice between linking an rlib or linking a dylib of all upstream
//! dependencies. The linking phase must guarantee, however, that a library only
//! shows up once in the object file. For example, it is illegal for library A
//! to be statically linked into B and C as two separate dylibs and then to
//! link B and C into a crate D (because library A would appear twice).
//!
//! The job of this module is to calculate the format in which each upstream
//! crate should be used when linking each output type requested in this
//! session. This generally follows this set of rules:
//!
//! 1. Each library must appear exactly once in the output.
//! 2. Each rlib contains only one library (it's just an object file)
//! 3. Each dylib can contain more than one library (due to static linking),
//! and can also bring in many dynamic dependencies.
//!
//! With these constraints in mind, it's generally a very difficult problem to
//! find a solution that's not "all rlibs" or "all dylibs". I have suspicions
//! that NP-ness may come into the picture here...
//!
//! The current selection algorithm below looks mostly similar to:
//!
//! 1. If static linking is required, then require all upstream dependencies
//! to be available as rlibs. If not, generate an error.
//! 2. If static linking is requested (generating an executable), then
//! attempt to use all upstream dependencies as rlibs. If any are not
//! found, bail out and continue to step 3.
//! 3. Static linking has failed, at least one library must be dynamically
//! linked. Apply a heuristic by greedily maximizing the number of
//! dynamically linked libraries.
//! 4. Each upstream dependency available as a dynamic library is
//! registered. The dependencies all propagate, adding to a map. It is
//! possible for a dylib to add a static library as a dependency, but it
//! is illegal for two dylibs to add the same static library as a
//! dependency. The same dylib can be added twice. Additionally, it is
//! illegal to add a static dependency when it was previously found as a
//! dylib (and vice versa)
//! 5. After all dynamic dependencies have been traversed, re-traverse the
//! remaining dependencies and add them statically (if they haven't been
//! added already).
//!
//! While not perfect, this algorithm should help support use-cases such as leaf
//! dependencies being static while the larger tree of inner dependencies are
//! all dynamic. This isn't currently very well battle tested, so it will likely
//! fall short in some use cases.
//!
//! Currently, there is no way to specify the preference of linkage with a
//! particular library (other than a global dynamic/static switch).
//! Additionally, the algorithm is geared towards finding *any* solution rather
//! than finding a number of solutions (there are normally quite a few).
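//!
//! As an illustrative sketch (hypothetical crates, not drawn from real
//! compiler output): if executable D depends on dylibs B and C, and both B
//! and C statically link A, then any plan that keeps both B and C dynamic is
//! rejected because A would be duplicated; the algorithm instead settles on
//! an all-rlib plan or maximizes dylibs while keeping each library linked
//! exactly once.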
use syntax::ast;
use session;
use session::config;
use metadata::cstore;
use metadata::csearch;
use middle::ty;
use util::nodemap::FnvHashMap;
/// A list of dependencies for a certain crate type.
///
/// The length of this vector is the same as the number of external crates used.
/// The value is None if the crate does not need to be linked (it was found
/// statically in another dylib), or Some(kind) if it needs to be linked as
/// `kind` (either static or dynamic).
pub type DependencyList = Vec<Option<cstore::LinkagePreference>>;<|fim▁hole|>
/// A mapping of all required dependencies for a particular flavor of output.
///
/// This is local to the tcx, and is generally relevant to one session.
pub type Dependencies = FnvHashMap<config::CrateType, DependencyList>;
pub fn calculate(tcx: &ty::ctxt) {
let mut fmts = tcx.dependency_formats.borrow_mut();
for &ty in tcx.sess.crate_types.borrow().iter() {
fmts.insert(ty, calculate_type(&tcx.sess, ty));
}
tcx.sess.abort_if_errors();
}
fn calculate_type(sess: &session::Session,
ty: config::CrateType) -> DependencyList {
match ty {
// If the global prefer_dynamic switch is turned off, first attempt
// static linkage (this can fail).
config::CrateTypeExecutable if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
}
// No linkage happens with rlibs, we just needed the metadata (which we
// got long ago), so don't bother with anything.
config::CrateTypeRlib => return Vec::new(),
// Staticlibs must have all static dependencies. If any fail to be
// found, we generate some nice pretty errors.
config::CrateTypeStaticlib => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.rlib.is_some() { return }
sess.err(&format!("dependency `{}` not found in rlib format",
data.name));
});
return Vec::new();
}
// Generating a dylib without `-C prefer-dynamic` means that we're going
// to try to eagerly statically link all dependencies. This is normally
// done for end-product dylibs, not intermediate products.
config::CrateTypeDylib if !sess.opts.cg.prefer_dynamic => {
match attempt_static(sess) {
Some(v) => return v,
None => {}
}
}
// Everything else falls through below
config::CrateTypeExecutable | config::CrateTypeDylib => {},
}
let mut formats = FnvHashMap();
// Sweep all crates for found dylibs. Add all dylibs, as well as their
// dependencies, ensuring there are no conflicts. The only valid case for a
// dependency to be relied upon twice is for both cases to rely on a dylib.
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.dylib.is_some() {
debug!("adding dylib: {}", data.name);
add_library(sess, cnum, cstore::RequireDynamic, &mut formats);
let deps = csearch::get_dylib_dependency_formats(&sess.cstore, cnum);
for &(depnum, style) in &deps {
debug!("adding {:?}: {}", style,
sess.cstore.get_crate_data(depnum).name.clone());
add_library(sess, depnum, style, &mut formats);
}
}
});
// Collect what we've got so far in the return vector.
let mut ret = (1..sess.cstore.next_crate_num()).map(|i| {
match formats.get(&i).cloned() {
v @ Some(cstore::RequireDynamic) => v,
_ => None,
}
}).collect::<Vec<_>>();
// Run through the dependency list again, and add any missing libraries as
// static libraries.
sess.cstore.iter_crate_data(|cnum, data| {
let src = sess.cstore.get_used_crate_source(cnum).unwrap();
if src.dylib.is_none() && !formats.contains_key(&cnum) {
assert!(src.rlib.is_some());
debug!("adding staticlib: {}", data.name);
add_library(sess, cnum, cstore::RequireStatic, &mut formats);
ret[cnum as usize - 1] = Some(cstore::RequireStatic);
}
});
// When dylib B links to dylib A, then when using B we must also link to A.
// It could be the case, however, that the rlib for A is present (hence we
// found metadata), but the dylib for A has since been removed.
//
// For situations like this, we perform one last pass over the dependencies,
// making sure that everything is available in the requested format.
for (cnum, kind) in ret.iter().enumerate() {
let cnum = cnum as ast::CrateNum;
let src = sess.cstore.get_used_crate_source(cnum + 1).unwrap();
match *kind {
None => continue,
Some(cstore::RequireStatic) if src.rlib.is_some() => continue,
Some(cstore::RequireDynamic) if src.dylib.is_some() => continue,
Some(kind) => {
let data = sess.cstore.get_crate_data(cnum + 1);
sess.err(&format!("crate `{}` required to be available in {}, \
but it was not available in this form",
data.name,
match kind {
cstore::RequireStatic => "rlib",
cstore::RequireDynamic => "dylib",
}));
}
}
}
return ret;
}
fn add_library(sess: &session::Session,
cnum: ast::CrateNum,
link: cstore::LinkagePreference,
m: &mut FnvHashMap<ast::CrateNum, cstore::LinkagePreference>) {
match m.get(&cnum) {
Some(&link2) => {
// If the linkages differ, then we'd have two copies of the library
// if we continued linking. If the linkages are both static, then we
// would also have two copies of the library (static from two
// different locations).
//
// This error is probably a little obscure, but I imagine that it
// can be refined over time.
if link2 != link || link == cstore::RequireStatic {
let data = sess.cstore.get_crate_data(cnum);
sess.err(&format!("cannot satisfy dependencies so `{}` only \
shows up once",
data.name));
sess.help("having upstream crates all available in one format \
will likely make this go away");
}
}
None => { m.insert(cnum, link); }
}
}
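// attempt_static returns Some(all-static dependency list) only when every
// upstream crate is available as an rlib; if any rlib is missing, the
// all-static plan is impossible and None is returned so the caller falls
// back to the dynamic heuristic above.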
fn attempt_static(sess: &session::Session) -> Option<DependencyList> {
let crates = sess.cstore.get_used_crates(cstore::RequireStatic);
if crates.iter().by_ref().all(|&(_, ref p)| p.is_some()) {
Some(crates.into_iter().map(|_| Some(cstore::RequireStatic)).collect())
} else {
None
}
}<|fim▁end|> | |
<|file_name|>pkg_config.py<|end_file_name|><|fim▁begin|>from __future__ import print_function
import shlex
import subprocess
import sys
from .config import Configuration
class PkgConfig(object):
class Error(Exception):
"""Raised when information could not be obtained from pkg-config."""
def __init__(self, package_name):
"""Query pkg-config for information about a package.
:type package_name: str
:param package_name: The name of the package to query.
:raises PkgConfig.Error: When a call to pkg-config fails.
"""
self.package_name = package_name
self._cflags = self._call("--cflags")
self._cflags_only_I = self._call("--cflags-only-I")
self._cflags_only_other = self._call("--cflags-only-other")
self._libs = self._call("--libs")
self._libs_only_l = self._call("--libs-only-l")
self._libs_only_L = self._call("--libs-only-L")
self._libs_only_other = self._call("--libs-only-other")
def _call(self, *pkg_config_args):
try:
cmd = [Configuration.current.pkg_config] + list(pkg_config_args) + [self.package_name]<|fim▁hole|> raise self.Error("pkg-config exited with error code {}".format(e.returncode))
@property
def swiftc_flags(self):
"""Flags for this package in a format suitable for passing to `swiftc`.
:rtype: list[str]
"""
return (
["-Xcc {}".format(s) for s in self._cflags_only_other]
+ ["-Xlinker {}".format(s) for s in self._libs_only_other]
+ self._cflags_only_I
+ self._libs_only_L
+ self._libs_only_l)
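    # Illustrative usage (hypothetical package name; the exact flags depend on
    # the local pkg-config database):
    #
    #   flags = PkgConfig("icu-uc").swiftc_flags
    #   # e.g. ["-Xcc -D...", "-I/usr/include", "-licuuc", ...]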
@property
def cflags(self):
"""CFLAGS for this package.
:rtype: list[str]
"""
return self._cflags
@property
def ldflags(self):
"""LDFLAGS for this package.
:rtype: list[str]
"""
return self._libs<|fim▁end|> | print("Executing command '{}'".format(cmd), file=sys.stderr)
return shlex.split(subprocess.check_output(cmd).decode('utf-8'))
except subprocess.CalledProcessError as e: |
<|file_name|>PrefixFileFilter.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.io.filefilter;
import java.io.File;
import java.io.Serializable;
import java.util.List;
import org.apache.commons.io.IOCase;
/**
* Filters filenames for a certain prefix.
* <p>
* For example, to print all files and directories in the
* current directory whose name starts with <code>Test</code>:
*
* <pre>
* File dir = new File(".");
* String[] files = dir.list( new PrefixFileFilter("Test") );
* for ( int i = 0; i < files.length; i++ ) {
* System.out.println(files[i]);
* }
* </pre>
*
* @since Commons IO 1.0
* @version $Revision: 1005099 $ $Date: 2010-10-06 17:13:01 +0100 (Wed, 06 Oct 2010) $
*
* @author Stephen Colebourne<|fim▁hole|> * @author Federico Barbieri
* @author Serge Knystautas
* @author Peter Donald
* @see FileFilterUtils#prefixFileFilter(String)
* @see FileFilterUtils#prefixFileFilter(String, IOCase)
*/
public class PrefixFileFilter extends AbstractFileFilter implements Serializable {
/** The filename prefixes to search for */
private final String[] prefixes;
/** Whether the comparison is case sensitive. */
private final IOCase caseSensitivity;
/**
* Constructs a new Prefix file filter for a single prefix.
*
* @param prefix the prefix to allow, must not be null
* @throws IllegalArgumentException if the prefix is null
*/
public PrefixFileFilter(String prefix) {
this(prefix, IOCase.SENSITIVE);
}
/**
* Constructs a new Prefix file filter for a single prefix
* specifying case-sensitivity.
*
* @param prefix the prefix to allow, must not be null
* @param caseSensitivity how to handle case sensitivity, null means case-sensitive
* @throws IllegalArgumentException if the prefix is null
* @since Commons IO 1.4
*/
public PrefixFileFilter(String prefix, IOCase caseSensitivity) {
if (prefix == null) {
throw new IllegalArgumentException("The prefix must not be null");
}
this.prefixes = new String[] {prefix};
this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
}
/**
* Constructs a new Prefix file filter for any of an array of prefixes.
* <p>
* The array is not cloned, so could be changed after constructing the
* instance. This would be inadvisable however.
*
* @param prefixes the prefixes to allow, must not be null
* @throws IllegalArgumentException if the prefix array is null
*/
public PrefixFileFilter(String[] prefixes) {
this(prefixes, IOCase.SENSITIVE);
}
/**
* Constructs a new Prefix file filter for any of an array of prefixes
* specifying case-sensitivity.
* <p>
* The array is not cloned, so could be changed after constructing the
* instance. This would be inadvisable however.
*
* @param prefixes the prefixes to allow, must not be null
* @param caseSensitivity how to handle case sensitivity, null means case-sensitive
* @throws IllegalArgumentException if the prefix is null
* @since Commons IO 1.4
*/
public PrefixFileFilter(String[] prefixes, IOCase caseSensitivity) {
if (prefixes == null) {
throw new IllegalArgumentException("The array of prefixes must not be null");
}
this.prefixes = new String[prefixes.length];
System.arraycopy(prefixes, 0, this.prefixes, 0, prefixes.length);
this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
}
/**
* Constructs a new Prefix file filter for a list of prefixes.
*
* @param prefixes the prefixes to allow, must not be null
* @throws IllegalArgumentException if the prefix list is null
* @throws ClassCastException if the list does not contain Strings
*/
public PrefixFileFilter(List<String> prefixes) {
this(prefixes, IOCase.SENSITIVE);
}
/**
* Constructs a new Prefix file filter for a list of prefixes
* specifying case-sensitivity.
*
* @param prefixes the prefixes to allow, must not be null
* @param caseSensitivity how to handle case sensitivity, null means case-sensitive
* @throws IllegalArgumentException if the prefix list is null
* @throws ClassCastException if the list does not contain Strings
* @since Commons IO 1.4
*/
public PrefixFileFilter(List<String> prefixes, IOCase caseSensitivity) {
if (prefixes == null) {
throw new IllegalArgumentException("The list of prefixes must not be null");
}
this.prefixes = prefixes.toArray(new String[prefixes.size()]);
this.caseSensitivity = (caseSensitivity == null ? IOCase.SENSITIVE : caseSensitivity);
}
/**
* Checks to see if the filename starts with the prefix.
*
* @param file the File to check
* @return true if the filename starts with one of our prefixes
*/
@Override
public boolean accept(File file) {
String name = file.getName();
for (String prefix : this.prefixes) {
if (caseSensitivity.checkStartsWith(name, prefix)) {
return true;
}
}
return false;
}
/**
* Checks to see if the filename starts with the prefix.
*
* @param file the File directory
* @param name the filename
* @return true if the filename starts with one of our prefixes
*/
@Override
public boolean accept(File file, String name) {
for (String prefix : prefixes) {
if (caseSensitivity.checkStartsWith(name, prefix)) {
return true;
}
}
return false;
}
/**
     * Provide a String representation of this file filter.
*
     * @return a String representation
*/
@Override
public String toString() {
StringBuilder buffer = new StringBuilder();
buffer.append(super.toString());
buffer.append("(");
if (prefixes != null) {
for (int i = 0; i < prefixes.length; i++) {
if (i > 0) {
buffer.append(",");
}
buffer.append(prefixes[i]);
}
}
buffer.append(")");
return buffer.toString();
}
}<|fim▁end|> | |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// Author: Alex Chernyakhovsky ([email protected])
// TODO(achernya): Per the documentation on doc.rust-lang.org, std::io
// is not yet ready, so use old_io until 1.0-final.
#![feature(old_io)]
// std::env contains a lot of nice functions that otherwise would
// require std::os to use; std::os has lots of deprecated functions.
#![feature(env)]
// TODO(achernya): Remove this feature when std::env moves over to
// std::path.
#![feature(old_path)]<|fim▁hole|>use std::old_io as io;
// println_stderr is like println, but to stderr.
macro_rules! println_stderr(
($($arg:tt)*) => (
match writeln!(&mut io::stderr(), $($arg)* ) {
Ok(_) => {},
Err(x) => panic!("Unable to write to stderr: {}", x),
}
)
);
// ShellCommand is a trait that defines a runnable POSIX shell
// command. An implementation is an abstract representation of shell
// commands such as simple invocations, invocations with redirection,
// and even shell pipelines.
trait ShellCommand {
fn run(&self);
}
fn shell_loop() {
let mut stdin = io::stdin();
loop {
print!("$ ");
let line = stdin.read_line();
match line {
Ok(expr) => handle_command(&expr),
Err(_) => break,
}
}
}
fn handle_command(user_expr: &str) {
// Clean up the string by removing the newline at the end
let expr = user_expr.trim_matches('\n');
let components: Vec<&str> = expr.split(' ').collect();
if builtins(&components) {
return;
}
}
fn builtins(command: &Vec<&str>) -> bool {
match command[0] {
"cd" => cd(command),
"pwd" => pwd(),
_ => return false,
}
true
}
fn cd(command: &Vec<&str>) {
// cd is the "change directory" command. It can take either 0 or 1
// arguments. If given no arguments, then the $HOME directory is
// chosen.
let dir: Option<Path> = match command.len() {
0 => panic!("invalid cd invocation"),
1 => env::home_dir(),
_ => Some(Path::new(command[1]))
};
if dir.is_none() {
println_stderr!("cd: no directory to change to");
return;
}
let directory = dir.unwrap();
let result = env::set_current_dir(&directory);
match result {
Err(err) => {
println_stderr!("cd: {}: {}", directory.display(), err);
},
_ => {},
}
}
fn pwd() {
let p = env::current_dir().unwrap_or(Path::new("/"));
println!("{}", p.display());
}
fn main() {
// TODO(achernya): is there any initialization we want to do
// before we enter the shell loop?
shell_loop();
}<|fim▁end|> |
use std::env; |
<|file_name|>bans.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# This file is part of Morse.
#
# Morse is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morse is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Morse. If not, see <http://www.gnu.org/licenses/>.
from . import db
from iptools import IpRange
from datetime import datetime, timedelta
from core import Board, User
class Ban (db.Model):
""" Ban is an abstract model for IPBan and UserBan. It provides
methods to check for affected boards and some to get different parts
of the ban duration """
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
reason = db.Column(db.String)
duration = db.Column(db.Interval)
expiration_date = db.Column(db.DateTime)
def __init__ (self, reason, duration_in_days = None):
self.reason = reason
if duration_in_days:
self.duration = timedelta(days = duration_in_days)
self.expiration_date = datetime.now() + self.duration
def applies_to (self, board):
""" signifies whether a ban applies to a certain board """
affected = self.affected_board_ids
return board.id in affected
@property
def affected_boards (self):
""" a list of all affected boards """
for board_id in self.affected_board_ids:
yield Board.query.get(board_id)
@property
def is_permanent (self):
return self.expiration_date is None
def update_duration_in_days (self, duration):
if duration is None:
self.duration = None
self.expiration_date = None
else:
if self.is_permanent:
old_beginning = datetime.now()
else:
old_beginning = self.expiration_date - self.duration
self.duration = timedelta(days = duration)
self.expiration_date = old_beginning + self.duration
duration_in_days = property(fset = update_duration_in_days)
@property
def has_expired (self):
if self.is_permanent:
return False
return self.expiration_date < datetime.now()
@property
def percentage_of_time_served (self):
if self.is_permanent:
return 0
if self.has_expired:
return 100
served = self.time_served
served_in_seconds = served.days * 24 * 60**2 + served.seconds
duration = self.duration
duration_in_seconds = duration.days * 24 * 60**2 + duration.seconds
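        # With integer operands this is floor division under Python 2, so the
        # result is a whole-number percentage.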
percentage = (100 * served_in_seconds) / duration_in_seconds
return percentage
@property
def percentage_of_time_left (self):<|fim▁hole|> @property
def time_served (self):
""" a timedelta object that signifies the
served time (only possible on limited bans) """
if self.is_permanent:
raise TypeError("this method is not available on permanent bans")
return self.duration - self.time_left
@property
def time_left (self):
""" a timedelta object that signifies the
time left to serve (only possible on limited bans) """
if self.is_permanent:
raise TypeError("this method is not available on permanent bans")
return self.expiration_date - datetime.now()
@property
def days_left (self):
""" an integer that signifies the number of days
left to serve (only possible on limited bans) """
if self.is_permanent:
raise TypeError("this method is not available on permanent bans")
return self.time_left.days
@property
def hours_left (self):
""" an integer that signifies the number of hours
left to serve (only possible on limited bans)
!!! this attribute DOES NOT signify the absolute
number of hours left, but rather the numbers of
hours left modulo 24
"""
if self.is_permanent:
raise TypeError("this method is not available on permanent bans")
seconds = self.time_left.seconds
return seconds // 60**2
@property
def minutes_left (self):
""" an integer that signifies the number of minutes
left to serve (only possible on limited bans)
!!! this attribute DOES NOT signify the absolute
number of minutes left, but rather the numbers of
minutes left modulo 60
"""
if self.is_permanent:
raise TypeError("this method is not available on permanent bans")
seconds = self.time_left.seconds
seconds_without_hours = seconds % 60**2
return seconds_without_hours // 60
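    # Example: with 1 day, 3 hours and 20 minutes left, days_left == 1,
    # hours_left == 3 and minutes_left == 20.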
class IPBan (Ban):
""" model for IP bans """
__tablename__ = "ip_bans"
ip_range = db.Column(db.String)
def __init__ (self, ip_range, reason, duration_in_days = None):
Ban.__init__(self, reason, duration_in_days)
self.ip_range = ip_range
@property
def affected_ips (self):
""" use this property instead of ip_range. it provides a
iptools.IpRange object instead of a simple string, which
means you can perform containment operations (e.g.
"my_ip in ban.ip_range" and the like) """
return IpRange(self.ip_range)
@property
def affected_board_ids (self):
""" an ID list of all affected boards """
query = IPBannedOn.query
query = query.filter(IPBannedOn.ban_id == self.id)
board_id_generator = query.values(IPBannedOn.board_id)
board_ids = [oneple[0] for oneple in board_id_generator]
return board_ids
class IPBannedOn (db.Model):
""" A relation between ip bans and boards, that signify
which boards are affected by a certain ip ban """
__tablename__ = "ip_banned_on"
ban_id = db.Column(db.Integer, primary_key=True)
board_id = db.Column(db.Integer, primary_key=True)
def __init__ (self, board_id, ban_id):
self.board_id = board_id
self.ban_id = ban_id
class UserBan (Ban):
""" model for user bans """
__tablename__ = "user_bans"
user_id = db.Column(db.ForeignKey("users.id"))
def __init__ (self, user_id, reason, duration_in_days = None):
Ban.__init__(self, reason, duration_in_days)
        self.user_id = user_id
@property
def affected_user (self):
return User.query.get(self.user_id)
@property
def affected_board_ids (self):
""" an ID list of all affected boards """
query = UserBannedOn.query
query = query.filter(UserBannedOn.ban_id == self.id)
board_id_generator = query.values(UserBannedOn.board_id)
board_ids = [oneple[0] for oneple in board_id_generator]
return board_ids
class UserBannedOn (db.Model):
""" A relation between user bans and boards, that signify
which boards are affected by a certain user ban """
__tablename__ = "user_banned_on"
ban_id = db.Column(db.Integer, primary_key=True)
board_id = db.Column(db.Integer, primary_key=True)
def __init__ (self, board_id, ban_id):
self.board_id = board_id
self.ban_id = ban_id<|fim▁end|> | return 100 - self.percentage_of_time_served
|
<|file_name|>issue-4228.rs<|end_file_name|><|fim▁begin|>// run-pass
// pretty-expanded FIXME #23616
struct Foo;
impl Foo {<|fim▁hole|>}
pub fn main() {
Foo::first();
Foo::second();
}<|fim▁end|> | fn first() {}
}
impl Foo {
fn second() {} |
<|file_name|>test_remoteexecution.py<|end_file_name|><|fim▁begin|>"""Test for Remote Execution
:Requirement: Remoteexecution
:CaseAutomation: Automated
:CaseLevel: Acceptance
:CaseComponent: RemoteExecution
:Assignee: pondrejk
:TestType: Functional
:CaseImportance: High
:Upstream: No
"""
import pytest
from nailgun import client
from nailgun.entity_mixins import TaskFailedError
from robottelo.api.utils import wait_for_tasks
CAPSULE_TARGET_VERSION = '6.10.z'
@pytest.mark.tier4
def test_positive_run_capsule_upgrade_playbook(capsule_configured, default_sat):
"""Run Capsule Upgrade playbook against an External Capsule
:id: 9ec6903d-2bb7-46a5-8002-afc74f06d83b
:steps:
1. Create a Capsule VM, add REX key.
2. Run the Capsule Upgrade Playbook.
:expectedresults: Capsule is upgraded successfully
:CaseImportance: Medium
"""
template_id = (
default_sat.api.JobTemplate()
.search(query={'search': 'name="Capsule Upgrade Playbook"'})[0]
.id
)
capsule_configured.add_rex_key(satellite=default_sat)
job = default_sat.api.JobInvocation().run(
synchronous=False,
data={
'job_template_id': template_id,
'inputs': {
'target_version': CAPSULE_TARGET_VERSION,
'whitelist_options': 'repositories-validate,repositories-setup',
},
'targeting_type': 'static_query',
'search_query': f'name = {capsule_configured.hostname}',
},
)
wait_for_tasks(f'resource_type = JobInvocation and resource_id = {job["id"]}')
result = default_sat.api.JobInvocation(id=job['id']).read()
assert result.succeeded == 1
result = default_sat.execute('foreman-maintain health check')
assert result.status == 0
for line in result.stdout:
assert 'FAIL' not in line
result = default_sat.api.SmartProxy(
id=default_sat.api.SmartProxy(name=default_sat.hostname).search()[0].id
).refresh()<|fim▁hole|> assert {'Discovery', 'Dynflow', 'Ansible', 'SSH', 'Logs', 'Pulp'}.issubset(feature_list)
@pytest.mark.destructive
def test_negative_run_capsule_upgrade_playbook_on_satellite(default_sat):
"""Run Capsule Upgrade playbook against the Satellite itself
:id: 99462a11-5133-415d-ba64-4354da539a34
:steps:
1. Add REX key to the Satellite server.
2. Run the Capsule Upgrade Playbook.
3. Check the job output for proper failure reason.
:expectedresults: Should fail
:CaseImportance: Medium
"""
sat = default_sat.nailgun_host
template_id = (
default_sat.api.JobTemplate()
.search(query={'search': 'name="Capsule Upgrade Playbook"'})[0]
.id
)
default_sat.add_rex_key(satellite=default_sat)
with pytest.raises(TaskFailedError) as error:
default_sat.api.JobInvocation().run(
data={
'job_template_id': template_id,
'inputs': {
'target_version': CAPSULE_TARGET_VERSION,
'whitelist_options': "repositories-validqqate,repositories-setup",
},
'targeting_type': "static_query",
'search_query': f"name = {sat.name}",
}
)
assert 'A sub task failed' in error.value.args[0]
job = default_sat.api.JobInvocation().search(
query={'search': f'host={sat.name},status=failed,description="Capsule Upgrade Playbook"'}
)[0]
response = client.get(
f'{default_sat.url}/api/job_invocations/{job.id}/hosts/{sat.id}',
auth=(default_sat.username, default_sat.password),
verify=False,
)
assert 'This playbook cannot be executed on a Satellite server.' in response.text<|fim▁end|> | feature_list = [feat['name'] for feat in result['features']] |
<|file_name|>limited_heap.rs<|end_file_name|><|fim▁begin|>use min_max_heap::{IntoIter, MinMaxHeap};
use std::iter::IntoIterator;
use super::weighted::WeightedItem;
/// A heap that only allows a constant amount of items. It will keep the items
/// with the highest priority.
#[derive(Clone)]
pub struct LimitedHeap<I, W>
where
W: Ord,
{
heap: MinMaxHeap<WeightedItem<I, W>>,
capacity: usize,
}
impl<I, W: Ord> LimitedHeap<I, W> {
pub fn with_capacity(capacity: usize) -> Self {
LimitedHeap {
heap: MinMaxHeap::with_capacity(capacity),
capacity,
}
}
pub fn push(&mut self, element: I, priority: W) -> Option<I> {
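        // With spare capacity we simply insert. Once the heap is full, the
        // new item is pushed and the current minimum is popped back out, so
        // the heap always keeps the `capacity` highest-priority items.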
if self.capacity > self.heap.len() {
self.heap.push(WeightedItem(element, priority));
None
} else {
Some(self.heap.push_pop_min(WeightedItem(element, priority)).0)
}<|fim▁hole|> self.heap.pop_max().map(|wi| wi.0)
}
pub fn clear(&mut self) {
self.heap.clear();
}
pub fn len(&self) -> usize {
self.heap.len()
}
pub fn peek(&self) -> Option<&I> {
self.heap.peek_max().map(|wi| &wi.0)
}
}
pub mod weighted {
use crate::agenda::weighted::Weighted;
/// An adapter for `super::LimitedHeap` that uses the priority given by the
/// items' implementation of `Weighted`.
pub struct LimitedHeap<I: Weighted>(super::LimitedHeap<I, I::Weight>)
where
I::Weight: Ord;
impl<I: Weighted> LimitedHeap<I>
where
I::Weight: Ord,
{
pub fn with_capacity(capacity: usize) -> Self {
LimitedHeap(super::LimitedHeap::with_capacity(capacity))
}
pub fn push(&mut self, element: I) -> Option<I> {
let priority = element.get_weight();
self.0.push(element, priority)
}
pub fn pop(&mut self) -> Option<I> {
self.0.pop()
}
pub fn clear(&mut self) {
self.0.clear()
}
pub fn len(&self) -> usize {
self.0.len()
}
pub fn peek(&self) -> Option<&I> {
self.0.peek()
}
}
impl<I: Weighted> IntoIterator for LimitedHeap<I>
where
I::Weight: Ord,
{
type IntoIter = <super::LimitedHeap<I, I::Weight> as IntoIterator>::IntoIter;
type Item = I;
fn into_iter(self) -> Self::IntoIter {
self.0.into_iter()
}
}
}
use super::weighted::RemoveWeight;
impl<I, W> IntoIterator for LimitedHeap<I, W>
where
W: Ord,
{
type IntoIter = RemoveWeight<I, W, IntoIter<WeightedItem<I, W>>>;
type Item = I;
fn into_iter(self) -> Self::IntoIter {
self.heap.into_iter().into()
}
}<|fim▁end|> | }
pub fn pop(&mut self) -> Option<I> { |
<|file_name|>EasyController.py<|end_file_name|><|fim▁begin|># Copyright 2009 by Tiago Antao <[email protected]>. All rights reserved.
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""
This module allows one to control GenePop through an easier interface.
This interface is less efficient than the standard GenePopController.
"""
from Controller import GenePopController
from Bio.PopGen import GenePop
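# Illustrative usage (the file name below is hypothetical):
#   ctrl = EasyController("example.gen")
#   pops, loci = ctrl.get_basic_info()
#   fis, fst, fit = ctrl.get_multilocus_f_stats()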
class EasyController:
def __init__(self, fname, genepop_dir = None):
"""Initializes the controller.
        genepop_dir is the directory where GenePop is installed.
        The binary should be called Genepop (capital G).
"""
self._fname = fname
self._controller = GenePopController(genepop_dir)
        self.__fst_pair_locus = {}  # More caches like this needed!
def get_basic_info(self):
f=open(self._fname)
rec = GenePop.read(f)
f.close()
return rec.pop_list, rec.loci_list
def test_hw_pop(self, pop_pos, test_type = "probability"):
if test_type=="deficiency":
hw_res = self._controller.test_pop_hz_deficiency(self._fname)
elif test_type=="excess":
hw_res = self._controller.test_pop_hz_excess(self._fname)
else:
loci_res, hw_res, fisher_full = self._controller.test_pop_hz_prob(self._fname, ".P")
for i in range(pop_pos-1):
hw_res.next()
return hw_res.next()
def test_hw_global(self, test_type = "deficiency", enum_test = True,
dememorization = 10000, batches = 20, iterations = 5000):
if test_type=="deficiency":
pop_res, loc_res, all = self._controller.test_global_hz_deficiency(self._fname,
enum_test, dememorization, batches, iterations)
else:
pop_res, loc_res, all = self._controller.test_global_hz_excess(self._fname,
enum_test, dememorization, batches, iterations)
return list(pop_res), list(loc_res), all
def test_ld_all_pair(self, locus1, locus2,
dememorization = 10000, batches = 20, iterations = 5000):
all_ld = self._controller.test_ld(self._fname, dememorization, batches, iterations)[1]
for ld_case in all_ld:
(l1, l2), result = ld_case
if (l1==locus1 and l2==locus2) or (l1==locus2 and l2==locus1):
return result
def estimate_nm(self):
""" Estimate Nm. Just a simple bridge.
"""
return self._controller.estimate_nm(self._fname)
def get_heterozygosity_info(self, pop_pos, locus_name):
"""Returns the heterozygosity info for a certain locus on a population.
Returns (Expected homozygotes, observed homozygotes,
Expected heterozygotes, observed heterozygotes)
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][1]
def get_genotype_count(self, pop_pos, locus_name):<|fim▁hole|>
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][0]
def get_fis(self, pop_pos, locus_name):
"""Returns the Fis for a certain population and locus
Below CW means Cockerham and Weir and RH means Robertson and Hill.
Returns a pair:
dictionary [allele] = (repetition count, frequency, Fis CW )
with information for each allele
a triple with total number of alleles, Fis CW, Fis RH
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pops = list(pop_iter)
return pops[pop_pos][1][locus_name][2:]
def get_alleles(self, pop_pos, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
pop_iter = list(pop_iter)
return pop_iter[pop_pos][1][locus_name][2].keys()
def get_alleles_all_pops(self, locus_name):
"""Returns the alleles for a certain population and locus.
"""
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
return locus_info[1]
def get_allele_frequency(self, pop_pos, locus_name):
geno_freqs = self._controller.calc_allele_genotype_freqs(self._fname)
pop_iter, loc_iter = geno_freqs
for locus_info in loc_iter:
if locus_info[0] == locus_name:
alleles = locus_info[1]
pop_name, freqs, total = locus_info[2][pop_pos]
allele_freq = {}
for i in range(len(alleles)):
allele_freq[alleles[i]] = freqs[i]
return total, allele_freq
def get_multilocus_f_stats(self):
""" Returns the multilocus F stats
Explain averaging.
Returns Fis(CW), Fst, Fit
"""
return self._controller.calc_fst_all(self._fname)[0]
def get_f_stats(self, locus_name):
""" Returns F stats for a locus
Returns Fis(CW), Fst, Fit, Qintra, Qinter
"""
loci_iter = self._controller.calc_fst_all(self._fname)[1]
for name, fis, fst, fit, qintra, qinter in loci_iter:
if name == locus_name:
return fis, fst, fit, qintra, qinter
def get_avg_fis(self):
return self._controller.calc_diversities_fis_with_identity(self._fname)[1]
def get_avg_fst_pair(self):
return self._controller.calc_fst_pair(self._fname)[1]
def get_avg_fst_pair_locus(self, locus):
if len(self.__fst_pair_locus) == 0:
iter = self._controller.calc_fst_pair(self._fname)[0]
for locus_info in iter:
self.__fst_pair_locus[locus_info[0]] = locus_info[1]
return self.__fst_pair_locus[locus]
def calc_ibd(self, is_diplo = True, stat="a", scale="Log", min_dist=0.00001):
if is_diplo:
return self._controller.calc_ibd_diplo(self._fname, stat, scale, min_dist)
else:
return self._controller.calc_ibd_haplo(self._fname, stat, scale, min_dist)<|fim▁end|> | """Returns the genotype counts for a certain population and locus |
<|file_name|>profile_extras.py<|end_file_name|><|fim▁begin|>from django import template
from django.template.defaultfilters import stringfilter
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe
from usermgmt import utils
register = template.Library()
@register.filter(is_safe=True)<|fim▁hole|>@stringfilter
def render_attributes(value, autoescape=True):
"""A filter for changing a list of user attributes into a list of links,
data, etc.
"""
# TODO
# @makyo 2016-11-06 #63
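    # Input is newline-separated "key=value" pairs; only keys present in
    # utils.ATTRIBUTES are rendered.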
if value == '':
return 'No attributes'
to_return = '<dl>'
for attribute in value.split('\n'):
k, v = attribute.split('=', 1)
if k in utils.ATTRIBUTES:
to_return += '<dt>{}</dt>'.format(utils.ATTRIBUTES[k]['dt'])
to_return += '<dd>{}</dd>'.format(
utils.ATTRIBUTES[k]['dd'].format(value=conditional_escape(v)))
to_return += '</dl>'
return mark_safe(to_return)<|fim▁end|> | |
<|file_name|>manual_typings.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | /// <reference path="firebase3/firebase3.d.ts" /> |
<|file_name|>test_GLM2_syn_2659x1049.py<|end_file_name|><|fim▁begin|>import unittest, time, sys
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_glm, h2o_import as h2i
params = {
'response': 1049,
'family': 'binomial',
'beta_epsilon': 0.0001,
'alpha': 1.0,
'lambda': 1e-05,
'n_folds': 1,
'max_iter': 20,
}
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
h2o.init(1)
<|fim▁hole|>
def test_GLM2_syn_2659x1049(self):
csvFilename = "syn_2659x1049.csv"
csvPathname = 'logreg' + '/' + csvFilename
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, hex_key=csvFilename + ".hex", schema='put')
kwargs = params
glm = h2o_cmd.runGLM(parseResult=parseResult, timeoutSecs=120, **kwargs)
h2o_glm.simpleCheckGLM(self, glm, None, **kwargs)
def test_GLM2_syn_2659x1049x2enum(self):
csvFilename = "syn_2659x1049x2enum.csv"
csvPathname = 'logreg' + '/' + csvFilename
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, hex_key=csvFilename + ".hex", schema='put')
kwargs = params
glm = h2o_cmd.runGLM(parseResult=parseResult, timeoutSecs=240, **kwargs)
h2o_glm.simpleCheckGLM(self, glm, None, **kwargs)
if __name__ == '__main__':
h2o.unit_main()<|fim▁end|> | @classmethod
def tearDownClass(cls):
h2o.tear_down_cloud() |
<|file_name|>test.js<|end_file_name|><|fim▁begin|>var BUFFER_SIZE = 2048;
var div = 1;
var context = new AudioContext();
var masterNode = context.createScriptProcessor(BUFFER_SIZE*4, 2, 2);
var nodes = [];
var eqs = [];
loadSample = function(url) {
var request = new XMLHttpRequest();
request.open('GET', url, true);
request.responseType = 'arraybuffer';
request.onload = function() {
console.log('url loaded');
context.decodeAudioData(request.response, function(decodedData) {
console.log('decoded data');
var I = nodes.length;
var N = nodes[I] = context.createScriptProcessor(BUFFER_SIZE, 2, 2);
masterNode.connect(nodes[I]);
eqs[I] = new Equalizer(nodes[I], context);
N.buffer = decodedData;
N.pvL = new PhaseVocoder(BUFFER_SIZE/div, 44100); N.pvL.init();
N.pvR = new PhaseVocoder(BUFFER_SIZE/div, 44100); N.pvR.init();
N.outBufferL = [];
N.outBufferR = [];
N.position = 0;
N.pitch = 1;
N.onaudioprocess = function (e) {
var il = this.buffer.getChannelData(0);
var ir = this.buffer.getChannelData(1);
var ol = e.outputBuffer.getChannelData(0);
var or = e.outputBuffer.getChannelData(1);
// Fill output buffers (left & right) until the system has
// enough processed samples to reproduce.
do {
// var bufL = new Float64Array(BUFFER_SIZE/div);
// var bufR = new Float64Array(BUFFER_SIZE/div);
var bufL = il.subarray(this.position, this.position+BUFFER_SIZE/div);
var bufR = ir.subarray(this.position, this.position+BUFFER_SIZE/div);
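                // Advance the read position by one analysis hop so
                // consecutive input frames overlap, as the phase vocoder
                // expects.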
this.position += this.pvL.get_analysis_hop();
// Process left input channel
this.outBufferL = this.outBufferL.concat(this.pvL.process(bufL));
// Process right input channel
this.outBufferR = this.outBufferR.concat(this.pvR.process(bufR));
} while(this.outBufferL.length < BUFFER_SIZE);
ol.set(this.outBufferL.splice(0, BUFFER_SIZE));
or.set(this.outBufferR.splice(0, BUFFER_SIZE));
};
});
}
console.log('reading url');
request.send();
}
// loadSample('../soundtouchjs/4.mp3');
// loadSample('../soundtouchjs/2.mp3');
// loadSample('../soundtouchjs/3.mp3');
function set_pitch(ids, newPitch) {
    for (var i = 0; i < ids.length; i++) {
        var N = nodes[ids[i]];
        var pitch = N.pvL.get_synthesis_hop() * newPitch / N.pvL.get_analysis_hop();
        N.pvL.set_overlap_factor(pitch);
        N.pvR.set_overlap_factor(pitch);
    }
}<|fim▁hole|>
        nodes[ids[i]].pvL.set_alpha(newFactor);
        nodes[ids[i]].pvR.set_alpha(newFactor);
}
}
function set_position(ids, newPosition) {
for (var i=0; i<ids.length; i++) {
        nodes[ids[i]].position = newPosition;
}
}
function play(ids) {
for (var i=0; i<ids.length; i++)
eqs[ids[i]].connect();
}
function pause(ids) {
for (var i=0; i<ids.length; i++)
eqs[ids[i]].disconnect();
}
function process_samples(input_start, buffer_size, input_channels, output_start, output_channels, rate) {
var beat, destination_offset, sample_l, sample_r, source_offset, source_offset_float;
while (--buffer_size >= 0) {
source_offset_float = input_start + (buffer_size * rate);
source_offset = Math.round(source_offset_float);
destination_offset = output_start + buffer_size;
sample_l = input_channels[0][source_offset];
sample_r = input_channels[1][source_offset];
output_channels[0][destination_offset] = sample_l;
output_channels[1][destination_offset] = sample_r;
}
return null;
};
function resample(buffer, fromRate /* or speed */, fromFrequency /* or toRate */, toRate, toFrequency) {
var argc = arguments.length,
speed = (argc === 2 ? fromRate : (argc === 3 ? fromRate / fromFrequency : toRate / fromRate * toFrequency / fromFrequency)),
l = buffer.length,
length = Math.ceil(l / speed),
newBuffer = new Array(length),
i, n;
for (i=0, n=0; i<l; i += speed) {
newBuffer[n++] = linear_interpolation(buffer, i);
}
return newBuffer;
};
function nearest_interpolation(arr, pos) {
return pos >= arr.length - 0.5 ? arr[0] : arr[Math.round(pos)];
};
function linear_interpolation(arr, pos) {
var first = Math.floor(pos),
second = first + 1,
frac = pos - first;
second = second < arr.length ? second : 0;
return arr[first] * (1 - frac) + arr[second] * frac;
};
function linearInterpolation (a, b, t) {
return a + (b - a) * t;
};
function hannWindow (length) {
var window = new Float32Array(length);
for (var i = 0; i < length; i++) {
window[i] = 0.5 * (1 - Math.cos(2 * Math.PI * i / (length - 1)));
}
return window;
};
grainSize = 512;
overlapRatio = 0.70;
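// Successive grains overlap by 70% to smooth the overlap-add resynthesis below.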
pitchShifterProcessor = context.createScriptProcessor(grainSize, 1, 1);
pitchShifterProcessor.buffer = new Float32Array(grainSize * 2);
pitchShifterProcessor.grainWindow = hannWindow(grainSize);
pitchRatio = 1;
pitchShifterProcessor.onaudioprocess = function (event) {
var inputData = event.inputBuffer.getChannelData(0);
var outputData = event.outputBuffer.getChannelData(0);
for (i = 0; i < inputData.length; i++) {
// Apply the window to the input buffer
inputData[i] *= this.grainWindow[i];
// Shift half of the buffer
this.buffer[i] = this.buffer[i + grainSize];
// Empty the buffer tail
this.buffer[i + grainSize] = 0.0;
}
// Calculate the pitch shifted grain re-sampling and looping the input
var grainData = new Float32Array(grainSize * 2);
for (var i = 0, j = 0.0;
i < grainSize;
i++, j += pitchRatio) {
var index = Math.floor(j) % grainSize;
var a = inputData[index];
var b = inputData[(index + 1) % grainSize];
grainData[i] += linearInterpolation(a, b, j % 1.0) * this.grainWindow[i];
}
// Copy the grain multiple times overlapping it
for (i = 0; i < grainSize; i += Math.round(grainSize * (1 - overlapRatio))) {
for (j = 0; j <= grainSize; j++) {
this.buffer[i + j] += grainData[j];
}
}
// Output the first half of the buffer
for (i = 0; i < grainSize; i++) {
outputData[i] = this.buffer[i];
}
};<|fim▁end|> |
function set_alpha(ids, newFactor) {
for (var i=0; i<ids.length; i++) { |
<|file_name|>slice.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::cmp::Ordering::{Equal, Greater, Less};
use std::default::Default;
use std::iter::RandomAccessIterator;
use std::mem;
use std::rand::{Rng, thread_rng};
use std::rc::Rc;
use std::slice::ElementSwaps;
fn square(n: usize) -> usize { n * n }
fn is_odd(n: &usize) -> bool { *n % 2 == 1 }
#[test]
fn test_from_fn() {
// Test on-stack from_fn.
let mut v: Vec<_> = (0..3).map(square).collect();
{
let v = v;
assert_eq!(v.len(), 3);
assert_eq!(v[0], 0);
assert_eq!(v[1], 1);
assert_eq!(v[2], 4);
}
// Test on-heap from_fn.
v = (0..5).map(square).collect();
{
let v = v;
assert_eq!(v.len(), 5);
assert_eq!(v[0], 0);
assert_eq!(v[1], 1);
assert_eq!(v[2], 4);
assert_eq!(v[3], 9);
assert_eq!(v[4], 16);
}
}
#[test]
fn test_from_elem() {
// Test on-stack from_elem.
let mut v = vec![10, 10];
{
let v = v;
assert_eq!(v.len(), 2);
assert_eq!(v[0], 10);
assert_eq!(v[1], 10);
}
// Test on-heap from_elem.
v = vec![20; 6];
{
let v = v.as_slice();
assert_eq!(v[0], 20);
assert_eq!(v[1], 20);
assert_eq!(v[2], 20);
assert_eq!(v[3], 20);
assert_eq!(v[4], 20);
assert_eq!(v[5], 20);
}
}
#[test]
fn test_is_empty() {
let xs: [i32; 0] = [];
assert!(xs.is_empty());
assert!(![0].is_empty());
}
#[test]
fn test_len_divzero() {
type Z = [i8; 0];
let v0 : &[Z] = &[];
let v1 : &[Z] = &[[]];
let v2 : &[Z] = &[[], []];
assert_eq!(mem::size_of::<Z>(), 0);
assert_eq!(v0.len(), 0);
assert_eq!(v1.len(), 1);
assert_eq!(v2.len(), 2);
}
#[test]
fn test_get() {
let mut a = vec![11];
assert_eq!(a.get(1), None);
a = vec![11, 12];
assert_eq!(a.get(1).unwrap(), &12);
a = vec![11, 12, 13];
assert_eq!(a.get(1).unwrap(), &12);
}
#[test]
fn test_first() {
let mut a = vec![];
assert_eq!(a.first(), None);
a = vec![11];
assert_eq!(a.first().unwrap(), &11);
a = vec![11, 12];
assert_eq!(a.first().unwrap(), &11);
}
#[test]
fn test_first_mut() {
let mut a = vec![];
assert_eq!(a.first_mut(), None);
a = vec![11];
assert_eq!(*a.first_mut().unwrap(), 11);
a = vec![11, 12];
assert_eq!(*a.first_mut().unwrap(), 11);
}
#[test]
fn test_tail() {
let mut a = vec![11];
let b: &[i32] = &[];
assert_eq!(a.tail(), b);
a = vec![11, 12];
let b: &[i32] = &[12];
assert_eq!(a.tail(), b);
}
#[test]
fn test_tail_mut() {
let mut a = vec![11];
let b: &mut [i32] = &mut [];
assert!(a.tail_mut() == b);
a = vec![11, 12];
let b: &mut [_] = &mut [12];
assert!(a.tail_mut() == b);
}
#[test]
#[should_panic]
fn test_tail_empty() {
let a = Vec::<i32>::new();
a.tail();
}
#[test]
#[should_panic]
fn test_tail_mut_empty() {
let mut a = Vec::<i32>::new();
a.tail_mut();
}
#[test]
fn test_init() {
let mut a = vec![11];
let b: &[i32] = &[];
assert_eq!(a.init(), b);
a = vec![11, 12];
let b: &[_] = &[11];
assert_eq!(a.init(), b);
}
#[test]
fn test_init_mut() {
let mut a = vec![11];
let b: &mut [i32] = &mut [];
assert!(a.init_mut() == b);
a = vec![11, 12];
let b: &mut [_] = &mut [11];
assert!(a.init_mut() == b);
}
#[test]
#[should_panic]
fn test_init_empty() {
let a = Vec::<i32>::new();
a.init();
}
#[test]
#[should_panic]
fn test_init_mut_empty() {
let mut a = Vec::<i32>::new();
a.init_mut();
}
#[test]
fn test_last() {
let mut a = vec![];
assert_eq!(a.last(), None);
a = vec![11];
assert_eq!(a.last().unwrap(), &11);
a = vec![11, 12];
assert_eq!(a.last().unwrap(), &12);
}
#[test]
fn test_last_mut() {
let mut a = vec![];
assert_eq!(a.last_mut(), None);
a = vec![11];
assert_eq!(*a.last_mut().unwrap(), 11);
a = vec![11, 12];
assert_eq!(*a.last_mut().unwrap(), 12);
}
#[test]
fn test_slice() {
// Test fixed length vector.
let vec_fixed = [1, 2, 3, 4];
let v_a = vec_fixed[1..vec_fixed.len()].to_vec();
assert_eq!(v_a.len(), 3);
assert_eq!(v_a[0], 2);
assert_eq!(v_a[1], 3);
assert_eq!(v_a[2], 4);
// Test on stack.
let vec_stack: &[_] = &[1, 2, 3];
let v_b = vec_stack[1..3].to_vec();
assert_eq!(v_b.len(), 2);
assert_eq!(v_b[0], 2);
assert_eq!(v_b[1], 3);
// Test `Box<[T]>`
let vec_unique = vec![1, 2, 3, 4, 5, 6];
let v_d = vec_unique[1..6].to_vec();
assert_eq!(v_d.len(), 5);
assert_eq!(v_d[0], 2);
assert_eq!(v_d[1], 3);
assert_eq!(v_d[2], 4);
assert_eq!(v_d[3], 5);
assert_eq!(v_d[4], 6);
}
#[test]
fn test_slice_from() {
let vec: &[_] = &[1, 2, 3, 4];
assert_eq!(&vec[..], vec);
let b: &[_] = &[3, 4];
assert_eq!(&vec[2..], b);
let b: &[_] = &[];
assert_eq!(&vec[4..], b);
}
#[test]
fn test_slice_to() {
let vec: &[_] = &[1, 2, 3, 4];
assert_eq!(&vec[..4], vec);
let b: &[_] = &[1, 2];
assert_eq!(&vec[..2], b);
let b: &[_] = &[];
assert_eq!(&vec[..0], b);
}
#[test]
fn test_pop() {
let mut v = vec![5];
let e = v.pop();
assert_eq!(v.len(), 0);
assert_eq!(e, Some(5));
let f = v.pop();
assert_eq!(f, None);
let g = v.pop();
assert_eq!(g, None);
}
#[test]
fn test_swap_remove() {
let mut v = vec![1, 2, 3, 4, 5];
let mut e = v.swap_remove(0);
assert_eq!(e, 1);
assert_eq!(v, [5, 2, 3, 4]);
e = v.swap_remove(3);
assert_eq!(e, 4);
assert_eq!(v, [5, 2, 3]);
}
#[test]
#[should_panic]
fn test_swap_remove_fail() {
let mut v = vec![1];
let _ = v.swap_remove(0);
let _ = v.swap_remove(0);
}
#[test]
fn test_swap_remove_noncopyable() {
// Tests that we don't accidentally run destructors twice.
let mut v: Vec<Box<_>> = Vec::new();
v.push(box 0u8);
v.push(box 0u8);
v.push(box 0u8);
let mut _e = v.swap_remove(0);
assert_eq!(v.len(), 2);
_e = v.swap_remove(1);
assert_eq!(v.len(), 1);
_e = v.swap_remove(0);
assert_eq!(v.len(), 0);
}
#[test]
fn test_push() {
// Test on-stack push().
let mut v = vec![];
v.push(1);
assert_eq!(v.len(), 1);
assert_eq!(v[0], 1);
// Test on-heap push().
v.push(2);
assert_eq!(v.len(), 2);
assert_eq!(v[0], 1);
assert_eq!(v[1], 2);
}
#[test]
fn test_truncate() {
let mut v: Vec<Box<_>> = vec![box 6,box 5,box 4];
v.truncate(1);
let v = v;
assert_eq!(v.len(), 1);
assert_eq!(*(v[0]), 6);
// If the unsafe block didn't drop things properly, we blow up here.
}
#[test]
fn test_clear() {
let mut v: Vec<Box<_>> = vec![box 6,box 5,box 4];
v.clear();
assert_eq!(v.len(), 0);
// If the unsafe block didn't drop things properly, we blow up here.
}
#[test]
fn test_dedup() {
fn case(a: Vec<i32>, b: Vec<i32>) {
let mut v = a;
v.dedup();
assert_eq!(v, b);
}
case(vec![], vec![]);
case(vec![1], vec![1]);
case(vec![1,1], vec![1]);
case(vec![1,2,3], vec![1,2,3]);
case(vec![1,1,2,3], vec![1,2,3]);
case(vec![1,2,2,3], vec![1,2,3]);
case(vec![1,2,3,3], vec![1,2,3]);
case(vec![1,1,2,2,2,3,3], vec![1,2,3]);
}
#[test]
fn test_dedup_unique() {
let mut v0: Vec<Box<_>> = vec![box 1, box 1, box 2, box 3];
v0.dedup();
let mut v1: Vec<Box<_>> = vec![box 1, box 2, box 2, box 3];
v1.dedup();
let mut v2: Vec<Box<_>> = vec![box 1, box 2, box 3, box 3];
v2.dedup();
/*
* If the boxed pointers were leaked or otherwise misused, valgrind
* and/or rt should raise errors.
*/
}
#[test]
fn test_dedup_shared() {
let mut v0: Vec<Box<_>> = vec![box 1, box 1, box 2, box 3];
v0.dedup();
let mut v1: Vec<Box<_>> = vec![box 1, box 2, box 2, box 3];
v1.dedup();
let mut v2: Vec<Box<_>> = vec![box 1, box 2, box 3, box 3];
v2.dedup();
/*
* If the pointers were leaked or otherwise misused, valgrind and/or
* rt should raise errors.
*/
}
#[test]
fn test_retain() {
let mut v = vec![1, 2, 3, 4, 5];
v.retain(is_odd);
assert_eq!(v, [1, 3, 5]);
}
#[test]
fn test_element_swaps() {
let mut v = [1, 2, 3];
for (i, (a, b)) in ElementSwaps::new(v.len()).enumerate() {
v.swap(a, b);
match i {
0 => assert!(v == [1, 3, 2]),
1 => assert!(v == [3, 1, 2]),
2 => assert!(v == [3, 2, 1]),
3 => assert!(v == [2, 3, 1]),
4 => assert!(v == [2, 1, 3]),
5 => assert!(v == [1, 2, 3]),
_ => panic!(),
}
}
}
#[test]
fn test_lexicographic_permutations() {
let v : &mut[_] = &mut[1, 2, 3, 4, 5];
assert!(v.prev_permutation() == false);
assert!(v.next_permutation());
let b: &mut[_] = &mut[1, 2, 3, 5, 4];
assert!(v == b);
assert!(v.prev_permutation());
let b: &mut[_] = &mut[1, 2, 3, 4, 5];
assert!(v == b);
assert!(v.next_permutation());
assert!(v.next_permutation());
let b: &mut[_] = &mut[1, 2, 4, 3, 5];
assert!(v == b);
assert!(v.next_permutation());
let b: &mut[_] = &mut[1, 2, 4, 5, 3];
assert!(v == b);
let v : &mut[_] = &mut[1, 0, 0, 0];
assert!(v.next_permutation() == false);
assert!(v.prev_permutation());
let b: &mut[_] = &mut[0, 1, 0, 0];
assert!(v == b);
assert!(v.prev_permutation());
let b: &mut[_] = &mut[0, 0, 1, 0];
assert!(v == b);
assert!(v.prev_permutation());
let b: &mut[_] = &mut[0, 0, 0, 1];
assert!(v == b);
assert!(v.prev_permutation() == false);
}
#[test]
fn test_lexicographic_permutations_empty_and_short() {
let empty : &mut[i32] = &mut[];
assert!(empty.next_permutation() == false);
let b: &mut[i32] = &mut[];
assert!(empty == b);
assert!(empty.prev_permutation() == false);
assert!(empty == b);
let one_elem : &mut[_] = &mut[4];
assert!(one_elem.prev_permutation() == false);
let b: &mut[_] = &mut[4];
assert!(one_elem == b);
assert!(one_elem.next_permutation() == false);
assert!(one_elem == b);
let two_elem : &mut[_] = &mut[1, 2];
assert!(two_elem.prev_permutation() == false);
let b : &mut[_] = &mut[1, 2];
let c : &mut[_] = &mut[2, 1];
assert!(two_elem == b);
assert!(two_elem.next_permutation());
assert!(two_elem == c);
assert!(two_elem.next_permutation() == false);
assert!(two_elem == c);
assert!(two_elem.prev_permutation());
assert!(two_elem == b);
assert!(two_elem.prev_permutation() == false);
assert!(two_elem == b);
}
#[test]
fn test_position_elem() {
assert!([].position_elem(&1).is_none());
let v1 = vec![1, 2, 3, 3, 2, 5];
assert_eq!(v1.position_elem(&1), Some(0));
assert_eq!(v1.position_elem(&2), Some(1));
assert_eq!(v1.position_elem(&5), Some(5));
assert!(v1.position_elem(&4).is_none());
}
#[test]
fn test_binary_search() {
assert_eq!([1,2,3,4,5].binary_search(&5).ok(), Some(4));
assert_eq!([1,2,3,4,5].binary_search(&4).ok(), Some(3));
assert_eq!([1,2,3,4,5].binary_search(&3).ok(), Some(2));
assert_eq!([1,2,3,4,5].binary_search(&2).ok(), Some(1));
assert_eq!([1,2,3,4,5].binary_search(&1).ok(), Some(0));
assert_eq!([2,4,6,8,10].binary_search(&1).ok(), None);
assert_eq!([2,4,6,8,10].binary_search(&5).ok(), None);
assert_eq!([2,4,6,8,10].binary_search(&4).ok(), Some(1));
assert_eq!([2,4,6,8,10].binary_search(&10).ok(), Some(4));
assert_eq!([2,4,6,8].binary_search(&1).ok(), None);
assert_eq!([2,4,6,8].binary_search(&5).ok(), None);
assert_eq!([2,4,6,8].binary_search(&4).ok(), Some(1));
assert_eq!([2,4,6,8].binary_search(&8).ok(), Some(3));
assert_eq!([2,4,6].binary_search(&1).ok(), None);
assert_eq!([2,4,6].binary_search(&5).ok(), None);
assert_eq!([2,4,6].binary_search(&4).ok(), Some(1));
assert_eq!([2,4,6].binary_search(&6).ok(), Some(2));
assert_eq!([2,4].binary_search(&1).ok(), None);
assert_eq!([2,4].binary_search(&5).ok(), None);
assert_eq!([2,4].binary_search(&2).ok(), Some(0));
assert_eq!([2,4].binary_search(&4).ok(), Some(1));
assert_eq!([2].binary_search(&1).ok(), None);
assert_eq!([2].binary_search(&5).ok(), None);
assert_eq!([2].binary_search(&2).ok(), Some(0));
assert_eq!([].binary_search(&1).ok(), None);
assert_eq!([].binary_search(&5).ok(), None);
assert!([1,1,1,1,1].binary_search(&1).ok() != None);
assert!([1,1,1,1,2].binary_search(&1).ok() != None);
assert!([1,1,1,2,2].binary_search(&1).ok() != None);
assert!([1,1,2,2,2].binary_search(&1).ok() != None);
assert_eq!([1,2,2,2,2].binary_search(&1).ok(), Some(0));
assert_eq!([1,2,3,4,5].binary_search(&6).ok(), None);
assert_eq!([1,2,3,4,5].binary_search(&0).ok(), None);
}
#[test]
fn test_reverse() {
let mut v = vec![10, 20];
assert_eq!(v[0], 10);
assert_eq!(v[1], 20);
v.reverse();
assert_eq!(v[0], 20);
assert_eq!(v[1], 10);
let mut v3 = Vec::<i32>::new();
v3.reverse();
assert!(v3.is_empty());
}
#[test]
fn test_sort() {
for len in 4..25 {
for _ in 0..100 {
let mut v: Vec<_> = thread_rng().gen_iter::<i32>().take(len).collect();
let mut v1 = v.clone();
v.sort();
assert!(v.windows(2).all(|w| w[0] <= w[1]));
v1.sort_by(|a, b| a.cmp(b));
assert!(v1.windows(2).all(|w| w[0] <= w[1]));
v1.sort_by(|a, b| b.cmp(a));
assert!(v1.windows(2).all(|w| w[0] >= w[1]));
}
}
// shouldn't panic
let mut v: [i32; 0] = [];
v.sort();
let mut v = [0xDEADBEEFu64];
v.sort();
assert!(v == [0xDEADBEEF]);
}
#[test]
fn test_sort_stability() {
for len in 4..25 {
for _ in 0..10 {
let mut counts = [0; 10];
// create a vector like [(6, 1), (5, 1), (6, 2), ...],
// where the first item of each tuple is random, but
// the second item represents which occurrence of that
// number this element is, i.e. the second elements
// will occur in sorted order.
let mut v: Vec<_> = (0..len).map(|_| {
let n = thread_rng().gen::<usize>() % 10;
counts[n] += 1;
(n, counts[n])
}).collect();
// only sort on the first element, so an unstable sort
// may mix up the counts.
v.sort_by(|&(a,_), &(b,_)| a.cmp(&b));
// this comparison includes the count (the second item
// of the tuple), so elements with equal first items
// will need to be ordered with increasing
// counts... i.e. exactly asserting that this sort is
// stable.
assert!(v.windows(2).all(|w| w[0] <= w[1]));
}
}
}
#[test]
fn test_concat() {
let v: [Vec<i32>; 0] = [];
let c = v.concat();
assert_eq!(c, []);
let d = [vec![1], vec![2, 3]].concat();
assert_eq!(d, [1, 2, 3]);
let v: &[&[_]] = &[&[1], &[2, 3]];
assert_eq!(v.connect(&0), [1, 0, 2, 3]);
let v: &[&[_]] = &[&[1], &[2], &[3]];
assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
}
#[test]
fn test_connect() {
let v: [Vec<i32>; 0] = [];
assert_eq!(v.connect(&0), []);
assert_eq!([vec![1], vec![2, 3]].connect(&0), [1, 0, 2, 3]);
assert_eq!([vec![1], vec![2], vec![3]].connect(&0), [1, 0, 2, 0, 3]);
let v: [&[_]; 2] = [&[1], &[2, 3]];
assert_eq!(v.connect(&0), [1, 0, 2, 3]);
let v: [&[_]; 3] = [&[1], &[2], &[3]];
assert_eq!(v.connect(&0), [1, 0, 2, 0, 3]);
}
#[test]
fn test_insert() {
let mut a = vec![1, 2, 4];
a.insert(2, 3);
assert_eq!(a, [1, 2, 3, 4]);
let mut a = vec![1, 2, 3];
a.insert(0, 0);
assert_eq!(a, [0, 1, 2, 3]);<|fim▁hole|>
let mut a = vec![];
a.insert(0, 1);
assert_eq!(a, [1]);
}
#[test]
#[should_panic]
fn test_insert_oob() {
let mut a = vec![1, 2, 3];
a.insert(4, 5);
}
#[test]
fn test_remove() {
let mut a = vec![1, 2, 3, 4];
assert_eq!(a.remove(2), 3);
assert_eq!(a, [1, 2, 4]);
assert_eq!(a.remove(2), 4);
assert_eq!(a, [1, 2]);
assert_eq!(a.remove(0), 1);
assert_eq!(a, [2]);
assert_eq!(a.remove(0), 2);
assert_eq!(a, []);
}
#[test]
#[should_panic]
fn test_remove_fail() {
let mut a = vec![1];
let _ = a.remove(0);
let _ = a.remove(0);
}
#[test]
fn test_capacity() {
let mut v = vec![0];
v.reserve_exact(10);
assert!(v.capacity() >= 11);
}
#[test]
fn test_slice_2() {
let v = vec![1, 2, 3, 4, 5];
let v = v.slice(1, 3);
assert_eq!(v.len(), 2);
assert_eq!(v[0], 2);
assert_eq!(v[1], 3);
}
#[test]
#[should_panic]
fn test_permute_fail() {
let v: [(Box<_>, Rc<_>); 4] =
[(box 0, Rc::new(0)), (box 0, Rc::new(0)),
(box 0, Rc::new(0)), (box 0, Rc::new(0))];
let mut i = 0;
for _ in v.permutations() {
if i == 2 {
panic!()
}
i += 1;
}
}
#[test]
fn test_total_ord() {
    let c = &[1, 2, 3];
    assert!([1, 2, 3, 4][..].cmp(c) == Greater);
    let c = &[1, 2, 3, 4];
    assert!([1, 2, 3][..].cmp(c) == Less);
    let c = &[1, 2, 3, 6];
    assert!([1, 2, 3, 4][..].cmp(c) == Less);
    let c = &[1, 2, 3, 4];
    assert!([1, 2, 3, 4][..].cmp(c) == Equal);
    let c = &[1, 2, 3, 4, 5, 6];
    assert!([1, 2, 3, 4, 5, 5, 5, 5][..].cmp(c) == Less);
    let c = &[1, 2, 3, 4];
    assert!([2, 2][..].cmp(c) == Greater);
}
#[test]
fn test_iterator() {
let xs = [1, 2, 5, 10, 11];
let mut it = xs.iter();
assert_eq!(it.size_hint(), (5, Some(5)));
assert_eq!(it.next().unwrap(), &1);
assert_eq!(it.size_hint(), (4, Some(4)));
assert_eq!(it.next().unwrap(), &2);
assert_eq!(it.size_hint(), (3, Some(3)));
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.size_hint(), (2, Some(2)));
assert_eq!(it.next().unwrap(), &10);
assert_eq!(it.size_hint(), (1, Some(1)));
assert_eq!(it.next().unwrap(), &11);
assert_eq!(it.size_hint(), (0, Some(0)));
assert!(it.next().is_none());
}
#[test]
fn test_random_access_iterator() {
let xs = [1, 2, 5, 10, 11];
let mut it = xs.iter();
assert_eq!(it.indexable(), 5);
assert_eq!(it.idx(0).unwrap(), &1);
assert_eq!(it.idx(2).unwrap(), &5);
assert_eq!(it.idx(4).unwrap(), &11);
assert!(it.idx(5).is_none());
assert_eq!(it.next().unwrap(), &1);
assert_eq!(it.indexable(), 4);
assert_eq!(it.idx(0).unwrap(), &2);
assert_eq!(it.idx(3).unwrap(), &11);
assert!(it.idx(4).is_none());
assert_eq!(it.next().unwrap(), &2);
assert_eq!(it.indexable(), 3);
assert_eq!(it.idx(1).unwrap(), &10);
assert!(it.idx(3).is_none());
assert_eq!(it.next().unwrap(), &5);
assert_eq!(it.indexable(), 2);
assert_eq!(it.idx(1).unwrap(), &11);
assert_eq!(it.next().unwrap(), &10);
assert_eq!(it.indexable(), 1);
assert_eq!(it.idx(0).unwrap(), &11);
assert!(it.idx(1).is_none());
assert_eq!(it.next().unwrap(), &11);
assert_eq!(it.indexable(), 0);
assert!(it.idx(0).is_none());
assert!(it.next().is_none());
}
#[test]
fn test_iter_size_hints() {
let mut xs = [1, 2, 5, 10, 11];
assert_eq!(xs.iter().size_hint(), (5, Some(5)));
assert_eq!(xs.iter_mut().size_hint(), (5, Some(5)));
}
#[test]
fn test_iter_clone() {
let xs = [1, 2, 5];
let mut it = xs.iter();
it.next();
let mut jt = it.clone();
assert_eq!(it.next(), jt.next());
assert_eq!(it.next(), jt.next());
assert_eq!(it.next(), jt.next());
}
#[test]
fn test_mut_iterator() {
let mut xs = [1, 2, 3, 4, 5];
for x in &mut xs {
*x += 1;
}
assert!(xs == [2, 3, 4, 5, 6])
}
#[test]
fn test_rev_iterator() {
let xs = [1, 2, 5, 10, 11];
let ys = [11, 10, 5, 2, 1];
let mut i = 0;
for &x in xs.iter().rev() {
assert_eq!(x, ys[i]);
i += 1;
}
assert_eq!(i, 5);
}
#[test]
fn test_mut_rev_iterator() {
let mut xs = [1, 2, 3, 4, 5];
for (i,x) in xs.iter_mut().rev().enumerate() {
*x += i;
}
assert!(xs == [5, 5, 5, 5, 5])
}
#[test]
fn test_move_iterator() {
let xs = vec![1,2,3,4,5];
assert_eq!(xs.into_iter().fold(0, |a: usize, b: usize| 10*a + b), 12345);
}
#[test]
fn test_move_rev_iterator() {
let xs = vec![1,2,3,4,5];
assert_eq!(xs.into_iter().rev().fold(0, |a: usize, b: usize| 10*a + b), 54321);
}
#[test]
fn test_splitator() {
let xs = &[1,2,3,4,5];
let splits: &[&[_]] = &[&[1], &[3], &[5]];
assert_eq!(xs.split(|x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[], &[2,3,4,5]];
assert_eq!(xs.split(|x| *x == 1).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[1,2,3,4], &[]];
assert_eq!(xs.split(|x| *x == 5).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[1,2,3,4,5]];
assert_eq!(xs.split(|x| *x == 10).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[], &[], &[], &[], &[], &[]];
assert_eq!(xs.split(|_| true).collect::<Vec<&[i32]>>(),
splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split(|x| *x == 5).collect::<Vec<&[i32]>>(), splits);
}
#[test]
fn test_splitnator() {
let xs = &[1,2,3,4,5];
let splits: &[&[_]] = &[&[1,2,3,4,5]];
assert_eq!(xs.splitn(0, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[1], &[3,4,5]];
assert_eq!(xs.splitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[], &[], &[], &[4,5]];
assert_eq!(xs.splitn(3, |_| true).collect::<Vec<_>>(),
splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.splitn(1, |x| *x == 5).collect::<Vec<_>>(), splits);
}
#[test]
fn test_splitnator_mut() {
let xs = &mut [1,2,3,4,5];
let splits: &[&mut[_]] = &[&mut [1,2,3,4,5]];
assert_eq!(xs.splitn_mut(0, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&mut[_]] = &[&mut [1], &mut [3,4,5]];
assert_eq!(xs.splitn_mut(1, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&mut[_]] = &[&mut [], &mut [], &mut [], &mut [4,5]];
assert_eq!(xs.splitn_mut(3, |_| true).collect::<Vec<_>>(),
splits);
let xs: &mut [i32] = &mut [];
let splits: &[&mut[i32]] = &[&mut []];
assert_eq!(xs.splitn_mut(1, |x| *x == 5).collect::<Vec<_>>(),
splits);
}
#[test]
fn test_rsplitator() {
let xs = &[1,2,3,4,5];
let splits: &[&[_]] = &[&[5], &[3], &[1]];
assert_eq!(xs.split(|x| *x % 2 == 0).rev().collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[2,3,4,5], &[]];
assert_eq!(xs.split(|x| *x == 1).rev().collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[], &[1,2,3,4]];
assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[1,2,3,4,5]];
assert_eq!(xs.split(|x| *x == 10).rev().collect::<Vec<_>>(),
splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.split(|x| *x == 5).rev().collect::<Vec<&[i32]>>(), splits);
}
#[test]
fn test_rsplitnator() {
let xs = &[1,2,3,4,5];
let splits: &[&[_]] = &[&[1,2,3,4,5]];
assert_eq!(xs.rsplitn(0, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[5], &[1,2,3]];
assert_eq!(xs.rsplitn(1, |x| *x % 2 == 0).collect::<Vec<_>>(),
splits);
let splits: &[&[_]] = &[&[], &[], &[], &[1,2]];
assert_eq!(xs.rsplitn(3, |_| true).collect::<Vec<_>>(),
splits);
let xs: &[i32] = &[];
let splits: &[&[i32]] = &[&[]];
assert_eq!(xs.rsplitn(1, |x| *x == 5).collect::<Vec<&[i32]>>(), splits);
}
#[test]
fn test_windowsator() {
let v = &[1,2,3,4];
let wins: &[&[_]] = &[&[1,2], &[2,3], &[3,4]];
assert_eq!(v.windows(2).collect::<Vec<_>>(), wins);
let wins: &[&[_]] = &[&[1,2,3], &[2,3,4]];
assert_eq!(v.windows(3).collect::<Vec<_>>(), wins);
assert!(v.windows(6).next().is_none());
let wins: &[&[_]] = &[&[3,4], &[2,3], &[1,2]];
assert_eq!(v.windows(2).rev().collect::<Vec<&[_]>>(), wins);
let mut it = v.windows(2);
assert_eq!(it.indexable(), 3);
let win: &[_] = &[1,2];
assert_eq!(it.idx(0).unwrap(), win);
let win: &[_] = &[2,3];
assert_eq!(it.idx(1).unwrap(), win);
let win: &[_] = &[3,4];
assert_eq!(it.idx(2).unwrap(), win);
assert_eq!(it.idx(3), None);
}
#[test]
#[should_panic]
fn test_windowsator_0() {
let v = &[1,2,3,4];
let _it = v.windows(0);
}
#[test]
fn test_chunksator() {
let v = &[1,2,3,4,5];
assert_eq!(v.chunks(2).len(), 3);
let chunks: &[&[_]] = &[&[1,2], &[3,4], &[5]];
assert_eq!(v.chunks(2).collect::<Vec<_>>(), chunks);
let chunks: &[&[_]] = &[&[1,2,3], &[4,5]];
assert_eq!(v.chunks(3).collect::<Vec<_>>(), chunks);
let chunks: &[&[_]] = &[&[1,2,3,4,5]];
assert_eq!(v.chunks(6).collect::<Vec<_>>(), chunks);
let chunks: &[&[_]] = &[&[5], &[3,4], &[1,2]];
assert_eq!(v.chunks(2).rev().collect::<Vec<_>>(), chunks);
let mut it = v.chunks(2);
assert_eq!(it.indexable(), 3);
let chunk: &[_] = &[1,2];
assert_eq!(it.idx(0).unwrap(), chunk);
let chunk: &[_] = &[3,4];
assert_eq!(it.idx(1).unwrap(), chunk);
let chunk: &[_] = &[5];
assert_eq!(it.idx(2).unwrap(), chunk);
assert_eq!(it.idx(3), None);
}
#[test]
#[should_panic]
fn test_chunksator_0() {
let v = &[1,2,3,4];
let _it = v.chunks(0);
}
#[test]
fn test_move_from() {
let mut a = [1,2,3,4,5];
let b = vec![6,7,8];
assert_eq!(a.move_from(b, 0, 3), 3);
assert!(a == [6,7,8,4,5]);
let mut a = [7,2,8,1];
let b = vec![3,1,4,1,5,9];
assert_eq!(a.move_from(b, 0, 6), 4);
assert!(a == [3,1,4,1]);
let mut a = [1,2,3,4];
let b = vec![5,6,7,8,9,0];
assert_eq!(a.move_from(b, 2, 3), 1);
assert!(a == [7,2,3,4]);
let mut a = [1,2,3,4,5];
let b = vec![5,6,7,8,9,0];
assert_eq!(a[2..4].move_from(b,1,6), 2);
assert!(a == [1,2,6,7,5]);
}
#[test]
fn test_reverse_part() {
let mut values = [1,2,3,4,5];
values[1..4].reverse();
assert!(values == [1,4,3,2,5]);
}
#[test]
fn test_show() {
macro_rules! test_show_vec {
($x:expr, $x_str:expr) => ({
let (x, x_str) = ($x, $x_str);
            assert_eq!(format!("{:?}", x), x_str);
})
}
let empty = Vec::<i32>::new();
test_show_vec!(empty, "[]");
test_show_vec!(vec![1], "[1]");
test_show_vec!(vec![1, 2, 3], "[1, 2, 3]");
test_show_vec!(vec![vec![], vec![1], vec![1, 1]],
"[[], [1], [1, 1]]");
let empty_mut: &mut [i32] = &mut[];
test_show_vec!(empty_mut, "[]");
let v = &mut[1];
test_show_vec!(v, "[1]");
let v = &mut[1, 2, 3];
test_show_vec!(v, "[1, 2, 3]");
let v: &mut[&mut[_]] = &mut[&mut[], &mut[1], &mut[1, 1]];
test_show_vec!(v, "[[], [1], [1, 1]]");
}
#[test]
fn test_vec_default() {
macro_rules! t {
($ty:ty) => {{
let v: $ty = Default::default();
assert!(v.is_empty());
}}
}
t!(&[i32]);
t!(Vec<i32>);
}
#[test]
fn test_bytes_set_memory() {
use std::slice::bytes::MutableByteVector;
let mut values = [1,2,3,4,5];
values[0..5].set_memory(0xAB);
assert!(values == [0xAB, 0xAB, 0xAB, 0xAB, 0xAB]);
values[2..4].set_memory(0xFF);
assert!(values == [0xAB, 0xAB, 0xFF, 0xFF, 0xAB]);
}
#[test]
#[should_panic]
fn test_overflow_does_not_cause_segfault() {
let mut v = vec![];
v.reserve_exact(-1);
v.push(1);
v.push(2);
}
#[test]
#[should_panic]
fn test_overflow_does_not_cause_segfault_managed() {
let mut v = vec![Rc::new(1)];
v.reserve_exact(-1);
v.push(Rc::new(2));
}
#[test]
fn test_mut_split_at() {
let mut values = [1u8,2,3,4,5];
{
let (left, right) = values.split_at_mut(2);
{
let left: &[_] = left;
assert!(left[..left.len()] == [1, 2]);
}
for p in left {
*p += 1;
}
{
let right: &[_] = right;
assert!(right[..right.len()] == [3, 4, 5]);
}
for p in right {
*p += 2;
}
}
assert!(values == [2, 3, 5, 6, 7]);
}
#[derive(Clone, PartialEq)]
struct Foo;
#[test]
fn test_iter_zero_sized() {
let mut v = vec![Foo, Foo, Foo];
assert_eq!(v.len(), 3);
let mut cnt = 0;
for f in &v {
assert!(*f == Foo);
cnt += 1;
}
assert_eq!(cnt, 3);
for f in &v[1..3] {
assert!(*f == Foo);
cnt += 1;
}
assert_eq!(cnt, 5);
for f in &mut v {
assert!(*f == Foo);
cnt += 1;
}
assert_eq!(cnt, 8);
for f in v {
assert!(f == Foo);
cnt += 1;
}
assert_eq!(cnt, 11);
let xs: [Foo; 3] = [Foo, Foo, Foo];
cnt = 0;
for f in &xs {
assert!(*f == Foo);
cnt += 1;
}
assert!(cnt == 3);
}
#[test]
fn test_shrink_to_fit() {
let mut xs = vec![0, 1, 2, 3];
for i in 4..100 {
xs.push(i)
}
assert_eq!(xs.capacity(), 128);
xs.shrink_to_fit();
assert_eq!(xs.capacity(), 100);
assert_eq!(xs, (0..100).collect::<Vec<_>>());
}
#[test]
fn test_starts_with() {
assert!(b"foobar".starts_with(b"foo"));
assert!(!b"foobar".starts_with(b"oob"));
assert!(!b"foobar".starts_with(b"bar"));
assert!(!b"foo".starts_with(b"foobar"));
assert!(!b"bar".starts_with(b"foobar"));
assert!(b"foobar".starts_with(b"foobar"));
let empty: &[u8] = &[];
assert!(empty.starts_with(empty));
assert!(!empty.starts_with(b"foo"));
assert!(b"foobar".starts_with(empty));
}
#[test]
fn test_ends_with() {
assert!(b"foobar".ends_with(b"bar"));
assert!(!b"foobar".ends_with(b"oba"));
assert!(!b"foobar".ends_with(b"foo"));
assert!(!b"foo".ends_with(b"foobar"));
assert!(!b"bar".ends_with(b"foobar"));
assert!(b"foobar".ends_with(b"foobar"));
let empty: &[u8] = &[];
assert!(empty.ends_with(empty));
assert!(!empty.ends_with(b"foo"));
assert!(b"foobar".ends_with(empty));
}
#[test]
fn test_mut_splitator() {
let mut xs = [0,1,0,2,3,0,0,4,5,0];
assert_eq!(xs.split_mut(|x| *x == 0).count(), 6);
for slice in xs.split_mut(|x| *x == 0) {
slice.reverse();
}
assert!(xs == [0,1,0,3,2,0,0,5,4,0]);
let mut xs = [0,1,0,2,3,0,0,4,5,0,6,7];
for slice in xs.split_mut(|x| *x == 0).take(5) {
slice.reverse();
}
assert!(xs == [0,1,0,3,2,0,0,5,4,0,6,7]);
}
#[test]
fn test_mut_splitator_rev() {
let mut xs = [1,2,0,3,4,0,0,5,6,0];
for slice in xs.split_mut(|x| *x == 0).rev().take(4) {
slice.reverse();
}
assert!(xs == [1,2,0,4,3,0,0,6,5,0]);
}
#[test]
fn test_get_mut() {
let mut v = [0,1,2];
assert_eq!(v.get_mut(3), None);
v.get_mut(1).map(|e| *e = 7);
assert_eq!(v[1], 7);
let mut x = 2;
assert_eq!(v.get_mut(2), Some(&mut x));
}
#[test]
fn test_mut_chunks() {
let mut v = [0, 1, 2, 3, 4, 5, 6];
assert_eq!(v.chunks_mut(2).len(), 4);
for (i, chunk) in v.chunks_mut(3).enumerate() {
for x in chunk {
*x = i as u8;
}
}
let result = [0, 0, 0, 1, 1, 1, 2];
assert!(v == result);
}
#[test]
fn test_mut_chunks_rev() {
let mut v = [0, 1, 2, 3, 4, 5, 6];
for (i, chunk) in v.chunks_mut(3).rev().enumerate() {
for x in chunk {
*x = i as u8;
}
}
let result = [2, 2, 2, 1, 1, 1, 0];
assert!(v == result);
}
#[test]
#[should_panic]
fn test_mut_chunks_0() {
let mut v = [1, 2, 3, 4];
let _it = v.chunks_mut(0);
}
#[test]
fn test_mut_last() {
let mut x = [1, 2, 3, 4, 5];
let h = x.last_mut();
assert_eq!(*h.unwrap(), 5);
let y: &mut [i32] = &mut [];
assert!(y.last_mut().is_none());
}
#[test]
fn test_to_vec() {
let xs: Box<_> = box [1, 2, 3];
let ys = xs.to_vec();
assert_eq!(ys, [1, 2, 3]);
}
mod bench {
use std::iter::repeat;
use std::{mem, ptr};
use std::rand::{Rng, weak_rng};
use test::{Bencher, black_box};
#[bench]
fn iterator(b: &mut Bencher) {
// peculiar numbers to stop LLVM from optimising the summation
// out.
let v: Vec<_> = (0..100).map(|i| i ^ (i << 1) ^ (i >> 1)).collect();
b.iter(|| {
let mut sum = 0;
for x in &v {
sum += *x;
}
// sum == 11806, to stop dead code elimination.
if sum == 0 {panic!()}
})
}
#[bench]
fn mut_iterator(b: &mut Bencher) {
let mut v: Vec<_> = repeat(0).take(100).collect();
b.iter(|| {
let mut i = 0;
for x in &mut v {
*x = i;
i += 1;
}
})
}
#[bench]
fn concat(b: &mut Bencher) {
let xss: Vec<Vec<i32>> =
(0..100).map(|i| (0..i).collect()).collect();
b.iter(|| {
xss.concat();
});
}
#[bench]
fn connect(b: &mut Bencher) {
let xss: Vec<Vec<i32>> =
(0..100).map(|i| (0..i).collect()).collect();
b.iter(|| {
xss.connect(&0)
});
}
#[bench]
fn push(b: &mut Bencher) {
let mut vec = Vec::<i32>::new();
b.iter(|| {
vec.push(0);
black_box(&vec);
});
}
#[bench]
fn starts_with_same_vector(b: &mut Bencher) {
let vec: Vec<_> = (0..100).collect();
b.iter(|| {
vec.starts_with(&vec)
})
}
#[bench]
fn starts_with_single_element(b: &mut Bencher) {
let vec: Vec<_> = vec![0];
b.iter(|| {
vec.starts_with(&vec)
})
}
#[bench]
fn starts_with_diff_one_element_at_end(b: &mut Bencher) {
let vec: Vec<_> = (0..100).collect();
let mut match_vec: Vec<_> = (0..99).collect();
match_vec.push(0);
b.iter(|| {
vec.starts_with(&match_vec)
})
}
#[bench]
fn ends_with_same_vector(b: &mut Bencher) {
let vec: Vec<_> = (0..100).collect();
b.iter(|| {
vec.ends_with(&vec)
})
}
#[bench]
fn ends_with_single_element(b: &mut Bencher) {
let vec: Vec<_> = vec![0];
b.iter(|| {
vec.ends_with(&vec)
})
}
#[bench]
fn ends_with_diff_one_element_at_beginning(b: &mut Bencher) {
let vec: Vec<_> = (0..100).collect();
let mut match_vec: Vec<_> = (0..100).collect();
match_vec[0] = 200;
b.iter(|| {
vec.starts_with(&match_vec)
})
}
#[bench]
fn contains_last_element(b: &mut Bencher) {
let vec: Vec<_> = (0..100).collect();
b.iter(|| {
vec.contains(&99)
})
}
#[bench]
fn zero_1kb_from_elem(b: &mut Bencher) {
b.iter(|| {
repeat(0u8).take(1024).collect::<Vec<_>>()
});
}
#[bench]
fn zero_1kb_set_memory(b: &mut Bencher) {
b.iter(|| {
let mut v = Vec::<u8>::with_capacity(1024);
unsafe {
let vp = v.as_mut_ptr();
ptr::set_memory(vp, 0, 1024);
v.set_len(1024);
}
v
});
}
#[bench]
fn zero_1kb_loop_set(b: &mut Bencher) {
b.iter(|| {
let mut v = Vec::<u8>::with_capacity(1024);
unsafe {
v.set_len(1024);
}
for i in 0..1024 {
v[i] = 0;
}
});
}
#[bench]
fn zero_1kb_mut_iter(b: &mut Bencher) {
b.iter(|| {
let mut v = Vec::<u8>::with_capacity(1024);
unsafe {
v.set_len(1024);
}
for x in &mut v {
*x = 0;
}
v
});
}
#[bench]
fn random_inserts(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v: Vec<_> = repeat((0, 0)).take(30).collect();
for _ in 0..100 {
let l = v.len();
v.insert(rng.gen::<usize>() % (l + 1),
(1, 1));
}
})
}
#[bench]
fn random_removes(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v: Vec<_> = repeat((0, 0)).take(130).collect();
for _ in 0..100 {
let l = v.len();
v.remove(rng.gen::<usize>() % l);
}
})
}
#[bench]
fn sort_random_small(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(5).collect();
v.sort();
});
b.bytes = 5 * mem::size_of::<u64>() as u64;
}
#[bench]
fn sort_random_medium(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(100).collect();
v.sort();
});
b.bytes = 100 * mem::size_of::<u64>() as u64;
}
#[bench]
fn sort_random_large(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v: Vec<_> = rng.gen_iter::<u64>().take(10000).collect();
v.sort();
});
b.bytes = 10000 * mem::size_of::<u64>() as u64;
}
#[bench]
fn sort_sorted(b: &mut Bencher) {
let mut v: Vec<_> = (0..10000).collect();
b.iter(|| {
v.sort();
});
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
}
type BigSortable = (u64, u64, u64, u64);
#[bench]
fn sort_big_random_small(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>().take(5)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 5 * mem::size_of::<BigSortable>() as u64;
}
#[bench]
fn sort_big_random_medium(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>().take(100)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 100 * mem::size_of::<BigSortable>() as u64;
}
#[bench]
fn sort_big_random_large(b: &mut Bencher) {
let mut rng = weak_rng();
b.iter(|| {
let mut v = rng.gen_iter::<BigSortable>().take(10000)
.collect::<Vec<BigSortable>>();
v.sort();
});
b.bytes = 10000 * mem::size_of::<BigSortable>() as u64;
}
#[bench]
fn sort_big_sorted(b: &mut Bencher) {
let mut v: Vec<BigSortable> = (0..10000).map(|i| (i, i, i, i)).collect();
b.iter(|| {
v.sort();
});
b.bytes = (v.len() * mem::size_of_val(&v[0])) as u64;
}
}<|fim▁end|> |
let mut a = vec![1, 2, 3];
a.insert(3, 4);
assert_eq!(a, [1, 2, 3, 4]); |
<|file_name|>passwordui.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Getting Things GNOME! - a personal organizer for the GNOME desktop
# Copyright (c) 2008-2013 - Lionel Dricot & Bertrand Rousseau
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.
# -----------------------------------------------------------------------------
from gi.repository import Gtk
from GTG import _
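# Usage sketch (assumes an existing Requester and a backend object exposing
# get_parameters()/set_parameter(), as the widget below expects):
#     ui = PasswordUI(req, backend, width=100)
#     parent_box.pack_start(ui, True, True, 0)
#     ui.commit_changes()  # persist the edited password to the backend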
class PasswordUI(Gtk.Box):
'''Widget displaying a Gtk.Label and a text box for entering a password'''
def __init__(self, req, backend, width):
'''Creates the gtk widgets and loads the current password in the text
field
@param req: a Requester
@param backend: a backend object
@param width: the width of the Gtk.Label object
'''
super(PasswordUI, self).__init__()
self.backend = backend
self.req = req
self._populate_gtk(width)
self._load_password()
self._connect_signals()
def _populate_gtk(self, width):
'''Creates the text box and the related label
@param width: the width of the Gtk.Label object
'''
password_label = Gtk.Label(label=_("Password:"))
password_label.set_alignment(xalign=0, yalign=0.5)
password_label.set_size_request(width=width, height=-1)
self.pack_start(password_label, False, True, 0)
align = Gtk.Alignment.new(0, 0.5, 1, 0)
align.set_padding(0, 0, 10, 0)
self.pack_start(align, True, True, 0)
self.password_textbox = Gtk.Entry()
align.add(self.password_textbox)
def _load_password(self):
'''Loads the password from the backend'''
password = self.backend.get_parameters()['password']
self.password_textbox.set_invisible_char('*')
self.password_textbox.set_visibility(False)
self.password_textbox.set_text(password)
def _connect_signals(self):
'''Connects the gtk signals'''
self.password_textbox.connect('changed', self.on_password_modified)
def commit_changes(self):
'''Saves the changes to the backend parameter ('password')'''
password = self.password_textbox.get_text()
self.backend.set_parameter('password', password)
def on_password_modified(self, sender):
''' Signal callback, executed when the user edits the password.
Disables the backend. The user will re-enable it to confirm the
changes they made.
@param sender: not used, only here for signal compatibility
'''<|fim▁hole|> if self.backend.is_enabled() and not self.backend.is_default():
self.req.set_backend_enabled(self.backend.get_id(), False)<|fim▁end|> | |
<|file_name|>dump_results.py<|end_file_name|><|fim▁begin|>"""Dump summed results (runoff, loss, delivery) per catchment, HUC12, flowpath and year as CSV."""
from pyiem.util import get_dbconn
pgconn = get_dbconn("idep")
cursor = pgconn.cursor()
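# Only scenario 5 is queried; each aggregated row becomes one CSV line on
# stdout, keyed by catchment, HUC12, flowpath and year (header printed below).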
cursor.execute(
"""
SELECT r.hs_id, r.huc_12, p.fpath, extract(year from valid) as yr,
sum(runoff) as sum_runoff,
sum(loss) as sum_loss, sum(delivery) as sum_delivery from
results r JOIN flowpaths p on (r.hs_id = p.fid)
WHERE r.scenario = 5
GROUP by r.hs_id, r.huc_12, fpath, yr
"""
)
print("CATCHMENT,HUC12,FPATH,YEAR,RUNOFF,LOSS,DELIVERY")
for row in cursor:
fpath = row[0]
if fpath < 100:
catchment = 0
else:<|fim▁hole|> print(str(catchment) + ",%s,%s,%s,%.4f,%.4f,%.4f" % row[1:])<|fim▁end|> | catchment = int(str(fpath)[:-2]) |
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate bf;
extern crate clap;
use std::io::prelude::*;
use std::io::stdout;
use std::fs::File;
use bf::*;
use clap::{Arg, App};
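// Example invocations (sketch; `program.bf` and `input.txt` are placeholder
// file names):
//   bfi "+[----->+++<]>+.---."        code given directly on the command line
//   bfi -f program.bf -i input.txt    code from a file, input read from a file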
fn main() {
let matches = App::new("bfi")
.version("0.1.0")
.author("David Barentt <[email protected]>")
.about("A rust implementation of brainfuck")
.arg(Arg::with_name("CODE").help("brainfuck code to execute"))
.arg(Arg::with_name("FILE")
.short("f")
.help("brainfuck code to execute from file")
.takes_value(true))
.arg(Arg::with_name("INPUT")
.short("i")
.long("input")
.help("Read input from file instead of stdin")
.takes_value(true))
.get_matches();
let code: String = if let Some(s) = matches.value_of("CODE") {
// Always take code from ARGS over FILE
s.to_string()
} else if let Some(f) = matches.value_of("FILE") {
let mut s = String::new();<|fim▁hole|> Err(e) => {
println!("Error while opening file `{}`: {}", f, e);
return;
}
};
if let Err(e) = file.read_to_string(&mut s) {
println!("Error while reading from `{}`: {}", f, e);
return;
}
s
} else {
println!("{}", matches.usage());
return;
};
match BFProgram::parse(&code) {
Ok(prog) => {
if let Some(read) = matches.value_of("INPUT") {
match File::open(read) {
Ok(mut r) => prog.run_with(&mut r, &mut stdout()),
Err(e) => {
println!("Error while opening file `{}` {}", read, e);
return;
}
}
} else {
prog.run();
}
}
Err(e) => println!("Error while parsing program: {}", e),
}
}<|fim▁end|> | let mut file = match File::open(f) {
Ok(fi) => fi, |
<|file_name|>body.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::FormDataBinding::FormDataMethods;
use dom::bindings::error::{Error, Fallible};
use dom::bindings::reflector::DomObject;
use dom::bindings::root::DomRoot;
use dom::bindings::str::USVString;
use dom::bindings::trace::RootedTraceableBox;
use dom::blob::{Blob, BlobImpl};
use dom::formdata::FormData;
use dom::globalscope::GlobalScope;
use dom::promise::Promise;
use js::jsapi::Heap;
use js::jsapi::JSContext;
use js::jsapi::JSObject;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::JS_ParseJSON;
use js::jsapi::Value as JSValue;
use js::jsval::UndefinedValue;
use js::typedarray::{ArrayBuffer, CreateWith};
use mime::{Mime, TopLevel, SubLevel};
use std::cell::Ref;
use std::ptr;<|fim▁hole|>
#[derive(Clone, Copy, JSTraceable, MallocSizeOf)]
pub enum BodyType {
Blob,
FormData,
Json,
Text,
ArrayBuffer
}
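// Each BodyType corresponds to one of the Body mixin's consuming methods
// (blob(), formData(), json(), text(), arrayBuffer(); see
// https://fetch.spec.whatwg.org/#body-mixin); consume_body() below drives
// them all through the same package-data algorithm.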
pub enum FetchedData {
Text(String),
Json(RootedTraceableBox<Heap<JSValue>>),
BlobData(DomRoot<Blob>),
FormData(DomRoot<FormData>),
ArrayBuffer(RootedTraceableBox<Heap<*mut JSObject>>),
}
// https://fetch.spec.whatwg.org/#concept-body-consume-body
#[allow(unrooted_must_root)]
pub fn consume_body<T: BodyOperations + DomObject>(object: &T, body_type: BodyType) -> Rc<Promise> {
let promise = Promise::new(&object.global());
// Step 1
if object.get_body_used() || object.is_locked() {
promise.reject_error(Error::Type(
"The response's stream is disturbed or locked".to_string(),
));
return promise;
}
object.set_body_promise(&promise, body_type);
// Steps 2-4
// TODO: Body does not yet have a stream.
consume_body_with_promise(object, body_type, &promise);
promise
}
// https://fetch.spec.whatwg.org/#concept-body-consume-body
#[allow(unrooted_must_root)]
pub fn consume_body_with_promise<T: BodyOperations + DomObject>(object: &T,
body_type: BodyType,
promise: &Promise) {
// Step 5
let body = match object.take_body() {
Some(body) => body,
None => return,
};
let pkg_data_results = run_package_data_algorithm(object,
body,
body_type,
object.get_mime_type());
match pkg_data_results {
Ok(results) => {
match results {
FetchedData::Text(s) => promise.resolve_native(&USVString(s)),
FetchedData::Json(j) => promise.resolve_native(&j),
FetchedData::BlobData(b) => promise.resolve_native(&b),
FetchedData::FormData(f) => promise.resolve_native(&f),
FetchedData::ArrayBuffer(a) => promise.resolve_native(&a)
};
},
Err(err) => promise.reject_error(err),
}
}
// https://fetch.spec.whatwg.org/#concept-body-package-data
#[allow(unsafe_code)]
fn run_package_data_algorithm<T: BodyOperations + DomObject>(object: &T,
bytes: Vec<u8>,
body_type: BodyType,
mime_type: Ref<Vec<u8>>)
-> Fallible<FetchedData> {
let global = object.global();
let cx = global.get_cx();
let mime = &*mime_type;
match body_type {
BodyType::Text => run_text_data_algorithm(bytes),
BodyType::Json => run_json_data_algorithm(cx, bytes),
BodyType::Blob => run_blob_data_algorithm(&global, bytes, mime),
BodyType::FormData => run_form_data_algorithm(&global, bytes, mime),
BodyType::ArrayBuffer => unsafe {
run_array_buffer_data_algorithm(cx, bytes)
}
}
}
fn run_text_data_algorithm(bytes: Vec<u8>) -> Fallible<FetchedData> {
Ok(FetchedData::Text(String::from_utf8_lossy(&bytes).into_owned()))
}
#[allow(unsafe_code)]
fn run_json_data_algorithm(cx: *mut JSContext,
bytes: Vec<u8>) -> Fallible<FetchedData> {
let json_text = String::from_utf8_lossy(&bytes);
let json_text: Vec<u16> = json_text.encode_utf16().collect();
rooted!(in(cx) let mut rval = UndefinedValue());
unsafe {
if !JS_ParseJSON(cx,
json_text.as_ptr(),
json_text.len() as u32,
rval.handle_mut()) {
JS_ClearPendingException(cx);
// TODO: See issue #13464. Exception should be thrown instead of cleared.
return Err(Error::Type("Failed to parse JSON".to_string()));
}
let rooted_heap = RootedTraceableBox::from_box(Heap::boxed(rval.get()));
Ok(FetchedData::Json(rooted_heap))
}
}
fn run_blob_data_algorithm(root: &GlobalScope,
bytes: Vec<u8>,
mime: &[u8]) -> Fallible<FetchedData> {
let mime_string = if let Ok(s) = String::from_utf8(mime.to_vec()) {
s
} else {
"".to_string()
};
let blob = Blob::new(root, BlobImpl::new_from_bytes(bytes), mime_string);
Ok(FetchedData::BlobData(blob))
}
fn run_form_data_algorithm(root: &GlobalScope, bytes: Vec<u8>, mime: &[u8]) -> Fallible<FetchedData> {
let mime_str = if let Ok(s) = str::from_utf8(mime) {
s
} else {
""
};
let mime: Mime = mime_str.parse().map_err(
|_| Error::Type("Inappropriate MIME-type for Body".to_string()))?;
match mime {
// TODO
// A parser for Mime(TopLevel::Multipart, SubLevel::FormData, _)
// is not implemented yet.
Mime(TopLevel::Application, SubLevel::WwwFormUrlEncoded, _) => {
let entries = form_urlencoded::parse(&bytes);
let formdata = FormData::new(None, root);
for (k, e) in entries {
formdata.Append(USVString(k.into_owned()), USVString(e.into_owned()));
}
return Ok(FetchedData::FormData(formdata));
},
_ => return Err(Error::Type("Inappropriate MIME-type for Body".to_string())),
}
}
#[allow(unsafe_code)]
unsafe fn run_array_buffer_data_algorithm(cx: *mut JSContext, bytes: Vec<u8>) -> Fallible<FetchedData> {
rooted!(in(cx) let mut array_buffer_ptr = ptr::null_mut::<JSObject>());
let arraybuffer = ArrayBuffer::create(cx, CreateWith::Slice(&bytes), array_buffer_ptr.handle_mut());
if arraybuffer.is_err() {
return Err(Error::JSFailed);
}
let rooted_heap = RootedTraceableBox::from_box(Heap::boxed(array_buffer_ptr.get()));
Ok(FetchedData::ArrayBuffer(rooted_heap))
}
pub trait BodyOperations {
fn get_body_used(&self) -> bool;
fn set_body_promise(&self, p: &Rc<Promise>, body_type: BodyType);
/// Returns `Some(_)` if the body is complete, `None` if there is more to
/// come.
fn take_body(&self) -> Option<Vec<u8>>;
fn is_locked(&self) -> bool;
fn get_mime_type(&self) -> Ref<Vec<u8>>;
}<|fim▁end|> | use std::rc::Rc;
use std::str;
use url::form_urlencoded; |
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2016 Acsone SA/NV
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{<|fim▁hole|> 'name': 'Account Invoice Check Total',
'summary': """
Check if the verification total is equal to the bill's total""",
'version': '10.0.1.0.0',
'license': 'AGPL-3',
'author': 'Acsone SA/NV,Odoo Community Association (OCA)',
'website': 'https://acsone.eu/',
'depends': [
'account',
],
'data': [
'views/account_config_settings.xml',
'security/account_invoice_security.xml',
'views/account_invoice.xml',
],
}<|fim▁end|> | |
<|file_name|>texture_format_util.cpp<|end_file_name|><|fim▁begin|>//
// Copyright 2015 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// texture_format_util:
// Contains helper functions for texture_format_table
//
#include "libANGLE/renderer/d3d/d3d11/texture_format_util.h"
#include "libANGLE/renderer/d3d/d3d11/formatutils11.h"
#include "libANGLE/renderer/d3d/loadimage.h"
#include "libANGLE/renderer/d3d/loadimage_etc.h"
namespace rx
{
namespace d3d11
{
namespace
{
// ES3 image loading functions vary based on:
// - the GL internal format (supplied to glTex*Image*D)
// - the GL data type given (supplied to glTex*Image*D)
// - the target DXGI_FORMAT that the image will be loaded into (which is chosen based on the D3D
// device's capabilities)
// This map type determines which loading function to use, based on these three parameters.
// Source formats and types are taken from Tables 3.2 and 3.3 of the ES 3 spec.
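// Lookup sketch (hypothetical caller): the vector registered for an internal
// format is scanned for the entry matching both the GL type and the DXGI
// format the renderer selected:
//   const D3D11LoadFunctionMap &loadMap = BuildD3D11LoadFunctionMap();
//   for (const GLTypeDXGIFunctionPair &entry : loadMap.at(GL_RGBA8))
//   {
//       if (entry.first == GL_UNSIGNED_BYTE &&
//           entry.second.first == DXGI_FORMAT_R8G8B8A8_UNORM)
//       {
//           LoadImageFunction loadFunc = entry.second.second;
//           // loadFunc(width, height, depth, input, ...) copies the pixels.
//       }
//   }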
void UnimplementedLoadFunction(size_t width,
size_t height,
size_t depth,
const uint8_t *input,
size_t inputRowPitch,
size_t inputDepthPitch,
uint8_t *output,
size_t outputRowPitch,
size_t outputDepthPitch)
{
UNIMPLEMENTED();
}
void UnreachableLoadFunction(size_t width,
size_t height,
size_t depth,
const uint8_t *input,
size_t inputRowPitch,
size_t inputDepthPitch,
uint8_t *output,
size_t outputRowPitch,
size_t outputDepthPitch)
{
UNREACHABLE();
}
// A helper function to insert data into the D3D11LoadFunctionMap with fewer characters.
inline void InsertLoadFunction(D3D11LoadFunctionMap *map, GLenum internalFormat, GLenum type,
DXGI_FORMAT dxgiFormat, LoadImageFunction loadFunc)
{
(*map)[internalFormat].push_back(GLTypeDXGIFunctionPair(type, DxgiFormatLoadFunctionPair(dxgiFormat, loadFunc)));
}
} // namespace
// TODO: This will be generated by a JSON file
const D3D11LoadFunctionMap &BuildD3D11LoadFunctionMap()
{
static D3D11LoadFunctionMap map;
// clang-format off
// | Internal format | Type | Target DXGI Format | Load function |
InsertLoadFunction(&map, GL_RGBA8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_RGB5_A1, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_RGBA4, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_SRGB8_ALPHA8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_RGBA8_SNORM, GL_BYTE, DXGI_FORMAT_R8G8B8A8_SNORM, LoadToNative<GLbyte, 4> );
InsertLoadFunction(&map, GL_RGBA4, GL_UNSIGNED_SHORT_4_4_4_4, DXGI_FORMAT_R8G8B8A8_UNORM, LoadRGBA4ToRGBA8 );
InsertLoadFunction(&map, GL_RGBA4, GL_UNSIGNED_SHORT_4_4_4_4, DXGI_FORMAT_B4G4R4A4_UNORM, LoadRGBA4ToARGB4 );
InsertLoadFunction(&map, GL_RGB10_A2, GL_UNSIGNED_INT_2_10_10_10_REV, DXGI_FORMAT_R10G10B10A2_UNORM, LoadToNative<GLuint, 1> );
InsertLoadFunction(&map, GL_RGB5_A1, GL_UNSIGNED_SHORT_5_5_5_1, DXGI_FORMAT_R8G8B8A8_UNORM, LoadRGB5A1ToRGBA8 );
InsertLoadFunction(&map, GL_RGB5_A1, GL_UNSIGNED_SHORT_5_5_5_1, DXGI_FORMAT_B5G5R5A1_UNORM, LoadRGB5A1ToA1RGB5 );
InsertLoadFunction(&map, GL_RGB5_A1, GL_UNSIGNED_INT_2_10_10_10_REV, DXGI_FORMAT_R8G8B8A8_UNORM, LoadRGB10A2ToRGBA8 );
InsertLoadFunction(&map, GL_RGBA16F, GL_HALF_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadToNative<GLhalf, 4> );
InsertLoadFunction(&map, GL_RGBA16F, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadToNative<GLhalf, 4> );
InsertLoadFunction(&map, GL_RGBA32F, GL_FLOAT, DXGI_FORMAT_R32G32B32A32_FLOAT, LoadToNative<GLfloat, 4> );
InsertLoadFunction(&map, GL_RGBA16F, GL_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, Load32FTo16F<4> );
InsertLoadFunction(&map, GL_RGBA8UI, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UINT, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_RGBA8I, GL_BYTE, DXGI_FORMAT_R8G8B8A8_SINT, LoadToNative<GLbyte, 4> );
InsertLoadFunction(&map, GL_RGBA16UI, GL_UNSIGNED_SHORT, DXGI_FORMAT_R16G16B16A16_UINT, LoadToNative<GLushort, 4> );
InsertLoadFunction(&map, GL_RGBA16I, GL_SHORT, DXGI_FORMAT_R16G16B16A16_SINT, LoadToNative<GLshort, 4> );
InsertLoadFunction(&map, GL_RGBA32UI, GL_UNSIGNED_INT, DXGI_FORMAT_R32G32B32A32_UINT, LoadToNative<GLuint, 4> );<|fim▁hole|> InsertLoadFunction(&map, GL_RGBA32I, GL_INT, DXGI_FORMAT_R32G32B32A32_SINT, LoadToNative<GLint, 4> );
InsertLoadFunction(&map, GL_RGB10_A2UI, GL_UNSIGNED_INT_2_10_10_10_REV, DXGI_FORMAT_R10G10B10A2_UINT, LoadToNative<GLuint, 1> );
InsertLoadFunction(&map, GL_RGB8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadToNative3To4<GLubyte, 0xFF> );
InsertLoadFunction(&map, GL_RGB565, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadToNative3To4<GLubyte, 0xFF> );
InsertLoadFunction(&map, GL_SRGB8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, LoadToNative3To4<GLubyte, 0xFF> );
InsertLoadFunction(&map, GL_RGB8_SNORM, GL_BYTE, DXGI_FORMAT_R8G8B8A8_SNORM, LoadToNative3To4<GLbyte, 0x7F> );
InsertLoadFunction(&map, GL_RGB565, GL_UNSIGNED_SHORT_5_6_5, DXGI_FORMAT_R8G8B8A8_UNORM, LoadR5G6B5ToRGBA8 );
InsertLoadFunction(&map, GL_RGB565, GL_UNSIGNED_SHORT_5_6_5, DXGI_FORMAT_B5G6R5_UNORM, LoadToNative<GLushort, 1> );
InsertLoadFunction(&map, GL_R11F_G11F_B10F, GL_UNSIGNED_INT_10F_11F_11F_REV, DXGI_FORMAT_R11G11B10_FLOAT, LoadToNative<GLuint, 1> );
InsertLoadFunction(&map, GL_RGB9_E5, GL_UNSIGNED_INT_5_9_9_9_REV, DXGI_FORMAT_R9G9B9E5_SHAREDEXP, LoadToNative<GLuint, 1> );
InsertLoadFunction(&map, GL_RGB16F, GL_HALF_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadToNative3To4<GLhalf, gl::Float16One>);
InsertLoadFunction(&map, GL_RGB16F, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadToNative3To4<GLhalf, gl::Float16One>);
InsertLoadFunction(&map, GL_R11F_G11F_B10F, GL_HALF_FLOAT, DXGI_FORMAT_R11G11B10_FLOAT, LoadRGB16FToRG11B10F );
InsertLoadFunction(&map, GL_R11F_G11F_B10F, GL_HALF_FLOAT_OES, DXGI_FORMAT_R11G11B10_FLOAT, LoadRGB16FToRG11B10F );
InsertLoadFunction(&map, GL_RGB9_E5, GL_HALF_FLOAT, DXGI_FORMAT_R9G9B9E5_SHAREDEXP, LoadRGB16FToRGB9E5 );
InsertLoadFunction(&map, GL_RGB9_E5, GL_HALF_FLOAT_OES, DXGI_FORMAT_R9G9B9E5_SHAREDEXP, LoadRGB16FToRGB9E5 );
InsertLoadFunction(&map, GL_RGB32F, GL_FLOAT, DXGI_FORMAT_R32G32B32A32_FLOAT, LoadToNative3To4<GLfloat, gl::Float32One>);
InsertLoadFunction(&map, GL_RGB16F, GL_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadRGB32FToRGBA16F );
InsertLoadFunction(&map, GL_R11F_G11F_B10F, GL_FLOAT, DXGI_FORMAT_R11G11B10_FLOAT, LoadRGB32FToRG11B10F );
InsertLoadFunction(&map, GL_RGB9_E5, GL_FLOAT, DXGI_FORMAT_R9G9B9E5_SHAREDEXP, LoadRGB32FToRGB9E5 );
InsertLoadFunction(&map, GL_RGB8UI, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UINT, LoadToNative3To4<GLubyte, 0x01> );
InsertLoadFunction(&map, GL_RGB8I, GL_BYTE, DXGI_FORMAT_R8G8B8A8_SINT, LoadToNative3To4<GLbyte, 0x01> );
InsertLoadFunction(&map, GL_RGB16UI, GL_UNSIGNED_SHORT, DXGI_FORMAT_R16G16B16A16_UINT, LoadToNative3To4<GLushort, 0x0001> );
InsertLoadFunction(&map, GL_RGB16I, GL_SHORT, DXGI_FORMAT_R16G16B16A16_SINT, LoadToNative3To4<GLshort, 0x0001> );
InsertLoadFunction(&map, GL_RGB32UI, GL_UNSIGNED_INT, DXGI_FORMAT_R32G32B32A32_UINT, LoadToNative3To4<GLuint, 0x00000001> );
InsertLoadFunction(&map, GL_RGB32I, GL_INT, DXGI_FORMAT_R32G32B32A32_SINT, LoadToNative3To4<GLint, 0x00000001> );
InsertLoadFunction(&map, GL_RG8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8_UNORM, LoadToNative<GLubyte, 2> );
InsertLoadFunction(&map, GL_RG8_SNORM, GL_BYTE, DXGI_FORMAT_R8G8_SNORM, LoadToNative<GLbyte, 2> );
InsertLoadFunction(&map, GL_RG16F, GL_HALF_FLOAT, DXGI_FORMAT_R16G16_FLOAT, LoadToNative<GLhalf, 2> );
InsertLoadFunction(&map, GL_RG16F, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16_FLOAT, LoadToNative<GLhalf, 2> );
InsertLoadFunction(&map, GL_RG32F, GL_FLOAT, DXGI_FORMAT_R32G32_FLOAT, LoadToNative<GLfloat, 2> );
InsertLoadFunction(&map, GL_RG16F, GL_FLOAT, DXGI_FORMAT_R16G16_FLOAT, Load32FTo16F<2> );
InsertLoadFunction(&map, GL_RG8UI, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8_UINT, LoadToNative<GLubyte, 2> );
InsertLoadFunction(&map, GL_RG8I, GL_BYTE, DXGI_FORMAT_R8G8_SINT, LoadToNative<GLbyte, 2> );
InsertLoadFunction(&map, GL_RG16UI, GL_UNSIGNED_SHORT, DXGI_FORMAT_R16G16_UINT, LoadToNative<GLushort, 2> );
InsertLoadFunction(&map, GL_RG16I, GL_SHORT, DXGI_FORMAT_R16G16_SINT, LoadToNative<GLshort, 2> );
InsertLoadFunction(&map, GL_RG32UI, GL_UNSIGNED_INT, DXGI_FORMAT_R32G32_UINT, LoadToNative<GLuint, 2> );
InsertLoadFunction(&map, GL_RG32I, GL_INT, DXGI_FORMAT_R32G32_SINT, LoadToNative<GLint, 2> );
InsertLoadFunction(&map, GL_R8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8_UNORM, LoadToNative<GLubyte, 1> );
InsertLoadFunction(&map, GL_R8_SNORM, GL_BYTE, DXGI_FORMAT_R8_SNORM, LoadToNative<GLbyte, 1> );
InsertLoadFunction(&map, GL_R16F, GL_HALF_FLOAT, DXGI_FORMAT_R16_FLOAT, LoadToNative<GLhalf, 1> );
InsertLoadFunction(&map, GL_R16F, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16_FLOAT, LoadToNative<GLhalf, 1> );
InsertLoadFunction(&map, GL_R32F, GL_FLOAT, DXGI_FORMAT_R32_FLOAT, LoadToNative<GLfloat, 1> );
InsertLoadFunction(&map, GL_R16F, GL_FLOAT, DXGI_FORMAT_R16_FLOAT, Load32FTo16F<1> );
InsertLoadFunction(&map, GL_R8UI, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8_UINT, LoadToNative<GLubyte, 1> );
InsertLoadFunction(&map, GL_R8I, GL_BYTE, DXGI_FORMAT_R8_SINT, LoadToNative<GLbyte, 1> );
InsertLoadFunction(&map, GL_R16UI, GL_UNSIGNED_SHORT, DXGI_FORMAT_R16_UINT, LoadToNative<GLushort, 1> );
InsertLoadFunction(&map, GL_R16I, GL_SHORT, DXGI_FORMAT_R16_SINT, LoadToNative<GLshort, 1> );
InsertLoadFunction(&map, GL_R32UI, GL_UNSIGNED_INT, DXGI_FORMAT_R32_UINT, LoadToNative<GLuint, 1> );
InsertLoadFunction(&map, GL_R32I, GL_INT, DXGI_FORMAT_R32_SINT, LoadToNative<GLint, 1> );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT16, GL_UNSIGNED_SHORT, DXGI_FORMAT_R16_TYPELESS, LoadToNative<GLushort, 1> );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT16, GL_UNSIGNED_SHORT, DXGI_FORMAT_D16_UNORM, LoadToNative<GLushort, 1> );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT24, GL_UNSIGNED_INT, DXGI_FORMAT_R24G8_TYPELESS, LoadR32ToR24G8 );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT24, GL_UNSIGNED_INT, DXGI_FORMAT_D24_UNORM_S8_UINT, LoadR32ToR24G8 );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT16, GL_UNSIGNED_INT, DXGI_FORMAT_R16_TYPELESS, LoadR32ToR16 );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT32F, GL_FLOAT, DXGI_FORMAT_R32_TYPELESS, LoadToNative<GLfloat, 1> );
InsertLoadFunction(&map, GL_DEPTH_COMPONENT32F, GL_FLOAT, DXGI_FORMAT_UNKNOWN, UnimplementedLoadFunction );
InsertLoadFunction(&map, GL_DEPTH24_STENCIL8, GL_UNSIGNED_INT_24_8, DXGI_FORMAT_R24G8_TYPELESS, LoadR32ToR24G8 );
InsertLoadFunction(&map, GL_DEPTH24_STENCIL8, GL_UNSIGNED_INT_24_8, DXGI_FORMAT_D24_UNORM_S8_UINT, LoadR32ToR24G8 );
InsertLoadFunction(&map, GL_DEPTH32F_STENCIL8, GL_FLOAT_32_UNSIGNED_INT_24_8_REV, DXGI_FORMAT_R32G8X24_TYPELESS, LoadToNative<GLuint, 2> );
InsertLoadFunction(&map, GL_DEPTH32F_STENCIL8, GL_FLOAT_32_UNSIGNED_INT_24_8_REV, DXGI_FORMAT_UNKNOWN, UnimplementedLoadFunction );
InsertLoadFunction(&map, GL_STENCIL_INDEX8, GL_UNSIGNED_BYTE, DXGI_FORMAT_R24G8_TYPELESS, UnimplementedLoadFunction );
InsertLoadFunction(&map, GL_STENCIL_INDEX8, GL_UNSIGNED_BYTE, DXGI_FORMAT_D24_UNORM_S8_UINT, UnimplementedLoadFunction );
// Unsized formats
// Load functions are unreachable because they are converted to sized internal formats based on
// the format and type before loading takes place.
InsertLoadFunction(&map, GL_RGBA, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_RGBA, GL_UNSIGNED_SHORT_4_4_4_4, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_RGBA, GL_UNSIGNED_SHORT_5_5_5_1, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_RGB, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_RGB, GL_UNSIGNED_SHORT_5_6_5, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_LUMINANCE, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_ALPHA, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
InsertLoadFunction(&map, GL_BGRA_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, UnreachableLoadFunction );
// From GL_OES_texture_float
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA, GL_FLOAT, DXGI_FORMAT_R32G32B32A32_FLOAT, LoadLA32FToRGBA32F );
InsertLoadFunction(&map, GL_LUMINANCE, GL_FLOAT, DXGI_FORMAT_R32G32B32A32_FLOAT, LoadL32FToRGBA32F );
InsertLoadFunction(&map, GL_ALPHA, GL_FLOAT, DXGI_FORMAT_R32G32B32A32_FLOAT, LoadA32FToRGBA32F );
// From GL_OES_texture_half_float
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA, GL_HALF_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadLA16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadLA16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE, GL_HALF_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadL16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadL16FToRGBA16F );
InsertLoadFunction(&map, GL_ALPHA, GL_HALF_FLOAT, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadA16FToRGBA16F );
InsertLoadFunction(&map, GL_ALPHA, GL_HALF_FLOAT_OES, DXGI_FORMAT_R16G16B16A16_FLOAT, LoadA16FToRGBA16F );
// From GL_EXT_texture_storage
InsertLoadFunction(&map, GL_ALPHA8_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_A8_UNORM, LoadToNative<GLubyte, 1> );
InsertLoadFunction(&map, GL_ALPHA8_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadA8ToRGBA8 );
InsertLoadFunction(&map, GL_LUMINANCE8_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadL8ToRGBA8 );
InsertLoadFunction(&map, GL_LUMINANCE8_ALPHA8_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadLA8ToRGBA8 );
InsertLoadFunction(&map, GL_ALPHA32F_EXT, GL_FLOAT, DXGI_FORMAT_UNKNOWN, LoadA32FToRGBA32F );
InsertLoadFunction(&map, GL_LUMINANCE32F_EXT, GL_FLOAT, DXGI_FORMAT_UNKNOWN, LoadL32FToRGBA32F );
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA32F_EXT, GL_FLOAT, DXGI_FORMAT_UNKNOWN, LoadLA32FToRGBA32F );
InsertLoadFunction(&map, GL_ALPHA16F_EXT, GL_HALF_FLOAT, DXGI_FORMAT_UNKNOWN, LoadA16FToRGBA16F );
InsertLoadFunction(&map, GL_ALPHA16F_EXT, GL_HALF_FLOAT_OES, DXGI_FORMAT_UNKNOWN, LoadA16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE16F_EXT, GL_HALF_FLOAT, DXGI_FORMAT_UNKNOWN, LoadL16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE16F_EXT, GL_HALF_FLOAT_OES, DXGI_FORMAT_UNKNOWN, LoadL16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA16F_EXT, GL_HALF_FLOAT, DXGI_FORMAT_UNKNOWN, LoadLA16FToRGBA16F );
InsertLoadFunction(&map, GL_LUMINANCE_ALPHA16F_EXT, GL_HALF_FLOAT_OES, DXGI_FORMAT_UNKNOWN, LoadLA16FToRGBA16F );
// From GL_ANGLE_depth_texture
InsertLoadFunction(&map, GL_DEPTH_COMPONENT32_OES, GL_UNSIGNED_INT, DXGI_FORMAT_UNKNOWN, LoadR32ToR24G8 );
// From GL_EXT_texture_format_BGRA8888
InsertLoadFunction(&map, GL_BGRA8_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_BGRA4_ANGLEX, GL_UNSIGNED_SHORT_4_4_4_4_REV_EXT, DXGI_FORMAT_UNKNOWN, LoadRGBA4ToRGBA8 );
InsertLoadFunction(&map, GL_BGRA4_ANGLEX, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadToNative<GLubyte, 4> );
InsertLoadFunction(&map, GL_BGR5_A1_ANGLEX, GL_UNSIGNED_SHORT_1_5_5_5_REV_EXT, DXGI_FORMAT_UNKNOWN, LoadRGB5A1ToRGBA8 );
InsertLoadFunction(&map, GL_BGR5_A1_ANGLEX, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadToNative<GLubyte, 4> );
// Compressed formats
// From ES 3.0.1 spec, table 3.16
// | Internal format | Type | Target DXGI Format | Load function
InsertLoadFunction(&map, GL_COMPRESSED_R11_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8_UNORM, LoadEACR11ToR8 );
InsertLoadFunction(&map, GL_COMPRESSED_SIGNED_R11_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8_SNORM, LoadEACR11SToR8 );
InsertLoadFunction(&map, GL_COMPRESSED_RG11_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8_UNORM, LoadEACRG11ToRG8 );
InsertLoadFunction(&map, GL_COMPRESSED_SIGNED_RG11_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8_SNORM, LoadEACRG11SToRG8 );
InsertLoadFunction(&map, GL_COMPRESSED_RGB8_ETC2, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadETC2RGB8ToRGBA8 );
InsertLoadFunction(&map, GL_COMPRESSED_SRGB8_ETC2, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, LoadETC2SRGB8ToRGBA8 );
InsertLoadFunction(&map, GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadETC2RGB8A1ToRGBA8 );
InsertLoadFunction(&map, GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, LoadETC2SRGB8A1ToRGBA8);
InsertLoadFunction(&map, GL_COMPRESSED_RGBA8_ETC2_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadETC2RGBA8ToRGBA8 );
InsertLoadFunction(&map, GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB, LoadETC2SRGBA8ToSRGBA8);
// From GL_ETC1_RGB8_OES
InsertLoadFunction(&map, GL_ETC1_RGB8_OES, GL_UNSIGNED_BYTE, DXGI_FORMAT_R8G8B8A8_UNORM, LoadETC1RGB8ToRGBA8 );
// From GL_EXT_texture_compression_dxt1
InsertLoadFunction(&map, GL_COMPRESSED_RGB_S3TC_DXT1_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadCompressedToNative<4, 4, 8> );
InsertLoadFunction(&map, GL_COMPRESSED_RGBA_S3TC_DXT1_EXT, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadCompressedToNative<4, 4, 8> );
// From GL_ANGLE_texture_compression_dxt3
InsertLoadFunction(&map, GL_COMPRESSED_RGBA_S3TC_DXT3_ANGLE, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadCompressedToNative<4, 4, 16> );
// From GL_ANGLE_texture_compression_dxt5
InsertLoadFunction(&map, GL_COMPRESSED_RGBA_S3TC_DXT5_ANGLE, GL_UNSIGNED_BYTE, DXGI_FORMAT_UNKNOWN, LoadCompressedToNative<4, 4, 16> );
// clang-format on
return map;
}
typedef std::pair<InitializeTextureFormatPair, InitializeTextureDataFunction>
InternalFormatInitializerPair;
// TODO: This should be generated by a JSON file
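// Maps (GL internal format, target DXGI format) pairs to functions that
// pre-fill texture data when a three-component GL format is emulated with a
// four-component DXGI format, so the unused alpha channel reads back as
// opaque (0xFF / Float16One / Float32One), or as 1 for integer formats.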
const InternalFormatInitializerMap &BuildInternalFormatInitializerMap()
{
static InternalFormatInitializerMap map;
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB8, DXGI_FORMAT_R8G8B8A8_UNORM),
Initialize4ComponentData<GLubyte, 0x00, 0x00, 0x00, 0xFF>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB565, DXGI_FORMAT_R8G8B8A8_UNORM),
Initialize4ComponentData<GLubyte, 0x00, 0x00, 0x00, 0xFF>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_SRGB8, DXGI_FORMAT_R8G8B8A8_UNORM_SRGB),
Initialize4ComponentData<GLubyte, 0x00, 0x00, 0x00, 0xFF>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB16F, DXGI_FORMAT_R16G16B16A16_FLOAT),
Initialize4ComponentData<GLhalf, 0x0000, 0x0000, 0x0000, gl::Float16One>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB32F, DXGI_FORMAT_R32G32B32A32_FLOAT),
Initialize4ComponentData<GLfloat, 0x00000000, 0x00000000, 0x00000000, gl::Float32One>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB8UI, DXGI_FORMAT_R8G8B8A8_UINT),
Initialize4ComponentData<GLubyte, 0x00, 0x00, 0x00, 0x01>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB8I, DXGI_FORMAT_R8G8B8A8_SINT),
Initialize4ComponentData<GLbyte, 0x00, 0x00, 0x00, 0x01>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB16UI, DXGI_FORMAT_R16G16B16A16_UINT),
Initialize4ComponentData<GLushort, 0x0000, 0x0000, 0x0000, 0x0001>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB16I, DXGI_FORMAT_R16G16B16A16_SINT),
Initialize4ComponentData<GLshort, 0x0000, 0x0000, 0x0000, 0x0001>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB32UI, DXGI_FORMAT_R32G32B32A32_UINT),
Initialize4ComponentData<GLuint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>));
map.insert(InternalFormatInitializerPair(
InitializeTextureFormatPair(GL_RGB32I, DXGI_FORMAT_R32G32B32A32_SINT),
Initialize4ComponentData<GLint, 0x00000000, 0x00000000, 0x00000000, 0x00000001>));
return map;
}
} // namespace d3d11
} // namespace rx<|fim▁end|> |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.