prompt | completion
---|---
large_string, lengths 70 to 991k | large_string, lengths 0 to 1.02k
<|file_name|>Panel.js<|end_file_name|><|fim▁begin|>/*jshint maxstatements:false*/
define(function (require, exports) {
"use strict";
var moment = require("moment"),
Promise = require("bluebird"),
_ = brackets.getModule("thirdparty/lodash"),
CodeInspection = brackets.getModule("language/CodeInspection"),
CommandManager = brackets.getModule("command/CommandManager"),
Commands = brackets.getModule("command/Commands"),
Dialogs = brackets.getModule("widgets/Dialogs"),
DocumentManager = brackets.getModule("document/DocumentManager"),
EditorManager = brackets.getModule("editor/EditorManager"),
FileUtils = brackets.getModule("file/FileUtils"),
FileViewController = brackets.getModule("project/FileViewController"),
KeyBindingManager = brackets.getModule("command/KeyBindingManager"),
LanguageManager = brackets.getModule("language/LanguageManager"),
FileSystem = brackets.getModule("filesystem/FileSystem"),
Menus = brackets.getModule("command/Menus"),
FindInFiles = brackets.getModule("search/FindInFiles"),
PanelManager = brackets.getModule("view/PanelManager"),
ProjectManager = brackets.getModule("project/ProjectManager"),
StringUtils = brackets.getModule("utils/StringUtils"),
Svn = require("src/svn/Svn"),
Events = require("./Events"),
EventEmitter = require("./EventEmitter"),
Preferences = require("./Preferences"),
ErrorHandler = require("./ErrorHandler"),
ExpectedError = require("./ExpectedError"),
Main = require("./Main"),
GutterManager = require("./GutterManager"),
Strings = require("../strings"),
Utils = require("src/Utils"),
SettingsDialog = require("./SettingsDialog"),
PANEL_COMMAND_ID = "brackets-git.panel";
var svnPanelTemplate = require("text!templates/svn-panel.html"),
gitPanelResultsTemplate = require("text!templates/git-panel-results.html"),
gitAuthorsDialogTemplate = require("text!templates/authors-dialog.html"),
gitCommitDialogTemplate = require("text!templates/git-commit-dialog.html"),
gitDiffDialogTemplate = require("text!templates/git-diff-dialog.html"),
questionDialogTemplate = require("text!templates/git-question-dialog.html");
var showFileWhiteList = /^\.gitignore$/;
var gitPanel = null,
$gitPanel = $(null),
gitPanelDisabled = null,
gitPanelMode = null,
showingUntracked = true,
$tableContainer = $(null);
/**
* Reloads the Document's contents from disk, discarding any unsaved changes in the editor.
*
* @param {!Document} doc
* @return {Promise} Resolved after editor has been refreshed; rejected if unable to load the
* file's new content. Errors are logged but no UI is shown.
*/
function _reloadDoc(doc) {
return Promise.cast(FileUtils.readAsText(doc.file))
.then(function (text) {
doc.refreshText(text, new Date());
})
.catch(function (err) {
ErrorHandler.logError("Error reloading contents of " + doc.file.fullPath);
ErrorHandler.logError(err);
});
}
function lintFile(filename) {
return CodeInspection.inspectFile(FileSystem.getFileForPath(Utils.getProjectRoot() + filename));
}
function _makeDialogBig($dialog) {
var $wrapper = $dialog.parents(".modal-wrapper").first();
if ($wrapper.length === 0) { return; }
// We need a bigger commit dialog
var minWidth = 500,
minHeight = 300,
maxWidth = $wrapper.width(),
maxHeight = $wrapper.height(),
desiredWidth = maxWidth / 2,
desiredHeight = maxHeight / 2;
if (desiredWidth < minWidth) { desiredWidth = minWidth; }
if (desiredHeight < minHeight) { desiredHeight = minHeight; }
$dialog
.width(desiredWidth)
.children(".modal-body")
.css("max-height", desiredHeight)
.end();
return { width: desiredWidth, height: desiredHeight };
}
function _showCommitDialog(stagedDiff, lintResults, prefilledMessage) {
// Flatten the error structure from various providers
lintResults.forEach(function (lintResult) {
lintResult.errors = [];
if (Array.isArray(lintResult.result)) {
lintResult.result.forEach(function (resultSet) {
if (!resultSet.result || !resultSet.result.errors) { return; }
var providerName = resultSet.provider.name;
resultSet.result.errors.forEach(function (e) {
lintResult.errors.push((e.pos.line + 1) + ": " + e.message + " (" + providerName + ")");
});
});
} else {
ErrorHandler.logError("[brackets-git] lintResults contain object in unexpected format: " + JSON.stringify(lintResult));
}
lintResult.hasErrors = lintResult.errors.length > 0;
});
// Filter out only results with errors to show
lintResults = _.filter(lintResults, function (lintResult) {
return lintResult.hasErrors;
});
// Open the dialog
var compiledTemplate = Mustache.render(gitCommitDialogTemplate, {
Strings: Strings,
hasLintProblems: lintResults.length > 0,
lintResults: lintResults
}),
dialog = Dialogs.showModalDialogUsingTemplate(compiledTemplate),
$dialog = dialog.getElement();
// We need a bigger commit dialog
_makeDialogBig($dialog);
// Show nicely colored commit diff
$dialog.find(".commit-diff").append(Utils.formatDiff(stagedDiff));
function getCommitMessageElement() {
var r = $dialog.find("[name='commit-message']:visible");
if (r.length !== 1) {
r = $dialog.find("[name='commit-message']");
for (var i = 0; i < r.length; i++) {
if ($(r[i]).css("display") !== "none") {
return $(r[i]);
}
}
}
return r;
}
var $commitMessageCount = $dialog.find("input[name='commit-message-count']");
// Add event to count characters in commit message
var recalculateMessageLength = function () {
var val = getCommitMessageElement().val().trim(),
length = val.length;
if (val.indexOf("\n") !== -1) {
// longest line
length = Math.max.apply(null, val.split("\n").map(function (l) { return l.length; }));
}
$commitMessageCount
.val(length)
.toggleClass("over50", length > 50 && length <= 100)
.toggleClass("over100", length > 100);
};
var usingTextArea = false;
// commit message handling
function switchCommitMessageElement() {
usingTextArea = !usingTextArea;
var findStr = "[name='commit-message']",
currentValue = $dialog.find(findStr + ":visible").val();
$dialog.find(findStr).toggle();
$dialog.find(findStr + ":visible")
.val(currentValue)
.focus();
recalculateMessageLength();
}
$dialog.find("button.primary").on("click", function (e) {
var $commitMessage = getCommitMessageElement();
if ($commitMessage.val().trim().length === 0) {
e.stopPropagation();
$commitMessage.addClass("invalid");
} else {
$commitMessage.removeClass("invalid");
}
});
$dialog.find("button.extendedCommit").on("click", function () {
switchCommitMessageElement();
// this value will be set only when manually triggered
Preferences.set("useTextAreaForCommitByDefault", usingTextArea);
});
function prefillMessage(msg) {
if (msg.indexOf("\n") !== -1 && !usingTextArea) {
switchCommitMessageElement();
}
$dialog.find("[name='commit-message']:visible").val(msg);
recalculateMessageLength();
}
if (Preferences.get("useTextAreaForCommitByDefault")) {
switchCommitMessageElement();
}
if (prefilledMessage) {
prefillMessage(prefilledMessage.trim());
}
// Add focus to commit message input
getCommitMessageElement().focus();
$dialog.find("[name='commit-message']")
.on("keyup", recalculateMessageLength)
.on("change", recalculateMessageLength);
recalculateMessageLength();
dialog.done(function (buttonId) {
if (buttonId === "ok") {
// this event won't launch when commit-message is empty, so it's safe to assume that it is not
var commitMessage = getCommitMessageElement().val();
// if commit message is extended and has a newline, put an empty line after first line to separate subject and body
var s = commitMessage.split("\n");
if (s.length > 1 && s[1].trim() !== "") {
s.splice(1, 0, "");
}
commitMessage = s.join("\n");
// now be paranoid and verify that nobody changed our diff while the dialog was shown
_getStagedDiff().then(function (diff) {
if (diff === stagedDiff) {
return Svn.commit(commitMessage);
} else {
throw new Error("Index was changed while commit dialog was shown!");
}
}).catch(function (err) {
ErrorHandler.showError(err, "Git Commit failed");
}).finally(function () {
EventEmitter.emit(Events.GIT_COMMITED);
refresh();
});
} else {
// this will trigger refreshing where appropriate
Svn.status();
}
});
}
function _showAuthors(file, blame, fromLine, toLine) {
var linesTotal = blame.length;
var blameStats = blame.reduce(function (stats, lineInfo) {
var name = lineInfo.author + " " + lineInfo["author-mail"];
if (stats[name]) {
stats[name] += 1;
} else {
stats[name] = 1;
}
return stats;
}, {});
blameStats = _.reduce(blameStats, function (arr, val, key) {
arr.push({
authorName: key,
lines: val,
percentage: Math.round(val / (linesTotal / 100))
});
return arr;
}, []);
blameStats = _.sortBy(blameStats, "lines").reverse();
if (fromLine || toLine) {
file += " (" + Strings.LINES + " " + fromLine + "-" + toLine + ")";
}
var compiledTemplate = Mustache.render(gitAuthorsDialogTemplate, {
file: file,
blameStats: blameStats,
Strings: Strings
});
Dialogs.showModalDialogUsingTemplate(compiledTemplate);
}
function _getCurrentFilePath(editor) {
var projectRoot = Utils.getProjectRoot(),
document = editor ? editor.document : DocumentManager.getCurrentDocument(),
filePath = document.file.fullPath;
if (filePath.indexOf(projectRoot) === 0) {
filePath = filePath.substring(projectRoot.length);
}
return filePath;
}
function handleAuthorsSelection() {
var editor = EditorManager.getActiveEditor(),
filePath = _getCurrentFilePath(editor),
currentSelection = editor.getSelection(),
fromLine = currentSelection.start.line + 1,
toLine = currentSelection.end.line + 1;
// fix when nothing is selected on that line
if (currentSelection.end.ch === 0) { toLine = toLine - 1; }
var isSomethingSelected = currentSelection.start.line !== currentSelection.end.line ||
currentSelection.start.ch !== currentSelection.end.ch;
if (!isSomethingSelected) {
ErrorHandler.showError(new ExpectedError("Nothing is selected!"));
return;
}
Svn.getBlame(filePath, fromLine, toLine).then(function (blame) {
return _showAuthors(filePath, blame, fromLine, toLine);
}).catch(function (err) {
ErrorHandler.showError(err, "Git Blame failed");
});
}
function handleAuthorsFile() {
var filePath = _getCurrentFilePath();
Svn.getBlame(filePath).then(function (blame) {
return _showAuthors(filePath, blame);
}).catch(function (err) {
ErrorHandler.showError(err, "Git Blame failed");
});
}
function handleGitDiff(file) {
Svn.diffFileNice(file).then(function (diff) {
// show the dialog with the diff
var compiledTemplate = Mustache.render(gitDiffDialogTemplate, { file: file, Strings: Strings }),
dialog = Dialogs.showModalDialogUsingTemplate(compiledTemplate),
$dialog = dialog.getElement();
_makeDialogBig($dialog);
$dialog.find(".commit-diff").append(Utils.formatDiff(diff));
}).catch(function (err) {
ErrorHandler.showError(err, "SVN Diff failed");
});
}
function handleGitUndo(file) {
var compiledTemplate = Mustache.render(questionDialogTemplate, {
title: Strings.UNDO_CHANGES,
question: StringUtils.format(Strings.Q_UNDO_CHANGES, _.escape(file)),
Strings: Strings
});
Dialogs.showModalDialogUsingTemplate(compiledTemplate).done(function (buttonId) {
if (buttonId === "ok") {
Svn.discardFileChanges(file).then(function () {
var currentProjectRoot = Utils.getProjectRoot();
DocumentManager.getAllOpenDocuments().forEach(function (doc) {
if (doc.file.fullPath === currentProjectRoot + file) {
_reloadDoc(doc);
}
});
refresh();
}).catch(function (err) {
ErrorHandler.showError(err, "Git Checkout failed");
});
}
});
}
function handleGitDelete(file) {
var compiledTemplate = Mustache.render(questionDialogTemplate, {
title: Strings.DELETE_FILE,
question: StringUtils.format(Strings.Q_DELETE_FILE, _.escape(file)),
Strings: Strings
});
Dialogs.showModalDialogUsingTemplate(compiledTemplate).done(function (buttonId) {
if (buttonId === "ok") {
FileSystem.resolve(Utils.getProjectRoot() + file, function (err, fileEntry) {
if (err) {
ErrorHandler.showError(err, "Could not resolve file");
return;
}
Promise.cast(ProjectManager.deleteItem(fileEntry))
.then(function () {
refresh();
})
.catch(function (err) {
ErrorHandler.showError(err, "File deletion failed");
});
});
}
});
}
function handleGlobalUpdate(){
var files = [];
return handleSvnUpdate(files);
}
function handleSvnUpdate(files){
if(!_.isArray(files)) return;
return Svn.updateFile(files).then(function(stdout){
refresh();
});
}
/**
* strips trailing whitespace from all the diffs and adds \n to the end
*/
function stripWhitespaceFromFile(filename, clearWholeFile) {
return new Promise(function (resolve, reject) {
var fullPath = Utils.getProjectRoot() + filename,
removeBom = Preferences.get("removeByteOrderMark"),
normalizeLineEndings = Preferences.get("normalizeLineEndings");
var _cleanLines = function (lineNumbers) {
// clean the file
var fileEntry = FileSystem.getFileForPath(fullPath);
return FileUtils.readAsText(fileEntry).then(function (text) {
if (removeBom) {
// remove BOM - \ufeff
text = text.replace(/\ufeff/g, "");
}
if (normalizeLineEndings) {
// normalizes line endings
text = text.replace(/\r\n/g, "\n");
}
// process lines
var lines = text.split("\n");
if (lineNumbers) {
lineNumbers.forEach(function (lineNumber) {
lines[lineNumber] = lines[lineNumber].replace(/\s+$/, "");
});
} else {
lines.forEach(function (ln, lineNumber) {
lines[lineNumber] = lines[lineNumber].replace(/\s+$/, "");
});
}
// add an empty line to the end; I've heard that git likes that for some reason
if (Preferences.get("addEndlineToTheEndOfFile")) {
var lastLineNumber = lines.length - 1;
if (lines[lastLineNumber].length > 0) {
lines[lastLineNumber] = lines[lastLineNumber].replace(/\s+$/, "");
}
if (lines[lastLineNumber].length > 0) {
lines.push("");
}
}
//-
text = lines.join("\n");
return Promise.cast(FileUtils.writeText(fileEntry, text))
.catch(function (err) {
ErrorHandler.logError("Wasn't able to clean whitespace from file: " + fullPath);
resolve();
throw err;
})
.then(function () {
// refresh the file if it's open in the background
DocumentManager.getAllOpenDocuments().forEach(function (doc) {
if (doc.file.fullPath === fullPath) {
_reloadDoc(doc);
}
});
// diffs were cleaned in this file
resolve();
});
});
};
if (clearWholeFile) {
_cleanLines(null);
} else {
Svn.diffFile(filename).then(function (diff) {
if (!diff) { return resolve(); }
var modified = [],
changesets = diff.split("\n").filter(function (l) { return l.match(/^@@/) !== null; });
// collect line numbers to clean
changesets.forEach(function (line) {
var i,
m = line.match(/^@@ -([,0-9]+) \+([,0-9]+) @@/),
s = m[2].split(","),
from = parseInt(s[0], 10),
to = from - 1 + (parseInt(s[1], 10) || 1);
for (i = from; i <= to; i++) { modified.push(i > 0 ? i - 1 : 0); }
});
_cleanLines(modified);
}).catch(function (ex) {
// This error will bubble up to preparing commit dialog so just log here
ErrorHandler.logError(ex);
reject(ex);
});
}
});
}
function _getStagedDiff() {
return Svn.getDiffOfStagedFiles().then(function (diff) {
if (!diff) {
return Svn.getListOfStagedFiles().then(function (filesList) {
return Strings.DIFF_FAILED_SEE_FILES + "\n\n" + filesList;
});
}
return diff;
});
}
// whatToDo gets values "continue" "skip" "abort"
function handleRebase(whatToDo) {
Svn.rebase(whatToDo).then(function () {
EventEmitter.emit(Events.REFRESH_ALL);
}).catch(function (err) {
ErrorHandler.showError(err, "Rebase " + whatToDo + " failed");
});
}
function abortMerge() {
Svn.discardAllChanges().then(function () {
EventEmitter.emit(Events.REFRESH_ALL);
}).catch(function (err) {
ErrorHandler.showError(err, "Merge abort failed");
});
}
function findConflicts() {
FindInFiles.doSearch(/^<<<<<<<\s|^=======\s|^>>>>>>>\s/gm);
}
function commitMerge() {
Utils.loadPathContent(Utils.getProjectRoot() + "/.git/MERGE_MSG").then(function (msg) {
handleGitCommit(msg);
}).catch(function (err) {
ErrorHandler.showError(err, "Merge commit failed");
});
}
function handleGitCommit(prefilledMessage) {
var codeInspectionEnabled = Preferences.get("useCodeInspection");
var stripWhitespace = Preferences.get("stripWhitespaceFromCommits");
// Disable button (it will be enabled when selecting files after reset)
Utils.setLoading($gitPanel.find(".git-commit"));
// First reset staged files, then add selected files to the index.
Svn.status().then(function (files) {
files = _.filter(files, function (file) {
return file.status.indexOf(Svn.FILE_STATUS.MODIFIED) !== -1;
});
if (files.length === 0) {
return ErrorHandler.showError(new Error("Commit button should have been disabled"), "Nothing staged to commit");
}
var lintResults = [],
promises = [];
files.forEach(function (fileObj) {
var queue = Promise.resolve();
var isDeleted = fileObj.status.indexOf(Svn.FILE_STATUS.DELETED) !== -1,
updateIndex = isDeleted;
// strip whitespace if configured to do so and file was not deleted
if (stripWhitespace && !isDeleted) {
// strip whitespace only for recognized languages so binary files won't get corrupted
var langId = LanguageManager.getLanguageForPath(fileObj.file).getId();
if (["unknown", "binary", "image", "markdown"].indexOf(langId) === -1) {
queue = queue.then(function () {
var clearWholeFile = fileObj.status.indexOf(Svn.FILE_STATUS.UNTRACKED) !== -1 ||
fileObj.status.indexOf(Svn.FILE_STATUS.RENAMED) !== -1;
return stripWhitespaceFromFile(fileObj.file, clearWholeFile);
});
}
}
// do a code inspection for the file, if it was not deleted
if (codeInspectionEnabled && !isDeleted) {
queue = queue.then(function () {
return lintFile(fileObj.file).then(function (result) {
if (result) {
lintResults.push({
filename: fileObj.file,
result: result
});
}
});
});
}
promises.push(queue);
});
return Promise.all(promises).then(function () {
// All files are in the index now, get the diff and show dialog.
return _getStagedDiff().then(function (diff) {
return _showCommitDialog(diff, lintResults, prefilledMessage);
});
});
}).catch(function (err) {
ErrorHandler.showError(err, "Preparing commit dialog failed");
}).finally(function () {
Utils.unsetLoading($gitPanel.find(".git-commit"));
});
}
function refreshCurrentFile() {
var currentProjectRoot = Utils.getProjectRoot();
var currentDoc = DocumentManager.getCurrentDocument();
if (currentDoc) {
$gitPanel.find("tr").each(function () {
var currentFullPath = currentDoc.file.fullPath,
thisFile = $(this).attr("x-file");
$(this).toggleClass("selected", currentProjectRoot + thisFile === currentFullPath);
});
} else {
$gitPanel.find("tr").removeClass("selected");
}
}
function shouldShow(fileObj) {
if (showFileWhiteList.test(fileObj.name)) {
return true;
}
return ProjectManager.shouldShow(fileObj);
}
function _refreshTableContainer(files) {
if (!gitPanel.isVisible()) {
return;
}
// remove files that we should not show
files = _.filter(files, function (file) {
return shouldShow(file);
});
var allStaged = files.length > 0 && _.all(files, function (file) { return file.status.indexOf(Svn.FILE_STATUS.STAGED) !== -1; });
$gitPanel.find(".check-all").prop("checked", allStaged).prop("disabled", files.length === 0);
var $editedList = $tableContainer.find(".git-edited-list");
var visibleBefore = $editedList.length ? $editedList.is(":visible") : true;<|fim▁hole|> if (files.length === 0) {
$tableContainer.append($("<p class='git-edited-list nothing-to-commit' />").text(Strings.NOTHING_TO_COMMIT));
} else {
// if desired, remove untracked files from the results
if (showingUntracked === false) {
files = _.filter(files, function (file) {
return file.status.indexOf(Svn.FILE_STATUS.UNTRACKED) === -1;
});
}
// -
files.forEach(function (file) {
file.staged = file.status.indexOf(Svn.FILE_STATUS.STAGED) !== -1;
file.statusText = file.status.map(function (status) {
return Strings["FILE_" + status];
}).join(", ");
file.allowDiff = file.status.indexOf(Svn.FILE_STATUS.UNTRACKED) === -1 &&
file.status.indexOf(Svn.FILE_STATUS.RENAMED) === -1 &&
file.status.indexOf(Svn.FILE_STATUS.DELETED) === -1;
file.allowDelete = file.status.indexOf(Svn.FILE_STATUS.UNTRACKED) !== -1;
file.allowUndo = !file.allowDelete && file.status.indexOf(Svn.FILE_STATUS.MODIFIED) !== -1;
file.allowUpdate = file.status.indexOf(Svn.FILE_STATUS.OUTOFDATE) !== -1;
file.allowAdd = file.status.indexOf(Svn.FILE_STATUS.UNTRACKED) > -1;
});
$tableContainer.append(Mustache.render(gitPanelResultsTemplate, {
files: files,
Strings: Strings
}));
refreshCurrentFile();
}
$tableContainer.find(".git-edited-list").toggle(visibleBefore);
}
function refresh() {
// hide the history panel and remove the class that marks the history button as active when refreshing
$gitPanel.find(".git-history-toggle").removeClass("active").attr("title", Strings.TOOLTIP_SHOW_HISTORY);
$gitPanel.find(".git-file-history").removeClass("active").attr("title", Strings.TOOLTIP_SHOW_FILE_HISTORY);
if (gitPanelMode === "not-repo") {
$tableContainer.empty();
return Promise.resolve();
}
$tableContainer.find("#git-history-list").remove();
$tableContainer.find(".git-edited-list").show();
var p1 = Svn.status(true);
//- push button
//var $pushBtn = $gitPanel.find(".git-push");
// var p2 = Svn.getCommitsAhead().then(function (commits) {
// $pushBtn.children("span").remove();
// if (commits.length > 0) {
// $pushBtn.append($("<span/>").text(" (" + commits.length + ")"));
// }
// }).catch(function () {
// $pushBtn.children("span").remove();
// });
// FUTURE: who listens for this?
return Promise.all([p1]);
}
function toggle(bool) {
if (gitPanelDisabled === true) {
return;
}
if (typeof bool !== "boolean") {
bool = !gitPanel.isVisible();
}
Preferences.persist("panelEnabled", bool);
Main.$icon.toggleClass("on", bool);
gitPanel.setVisible(bool);
// Mark menu item as enabled/disabled.
CommandManager.get(PANEL_COMMAND_ID).setChecked(bool);
if (bool) {
refresh();
}
}
function handleToggleUntracked() {
showingUntracked = !showingUntracked;
$gitPanel
.find(".git-toggle-untracked")
.text(showingUntracked ? Strings.HIDE_UNTRACKED : Strings.SHOW_UNTRACKED);
refresh();
}
function commitCurrentFile() {
return Promise.cast(CommandManager.execute("file.save"))
.then(function () {
return Svn.resetIndex();
})
.then(function () {
var currentProjectRoot = Utils.getProjectRoot();
var currentDoc = DocumentManager.getCurrentDocument();
if (currentDoc) {
var relativePath = currentDoc.file.fullPath.substring(currentProjectRoot.length);
return Svn.stage(relativePath).then(function () {
return handleGitCommit();
});
}
});
}
function commitAllFiles() {
return Promise.cast(CommandManager.execute("file.saveAll"))
.then(function () {
return Svn.resetIndex();
})
.then(function () {
return Svn.stageAll().then(function () {
return handleGitCommit();
});
});
}
// Disable "commit" button if there aren't staged files to commit
function _toggleCommitButton(files) {
var anyStaged = _.any(files, function (file) { return file.status.indexOf(Svn.FILE_STATUS.STAGED) !== -1; });
$gitPanel.find(".git-commit").prop("disabled", !anyStaged);
}
EventEmitter.on(Events.GIT_STATUS_RESULTS, function (results) {
_refreshTableContainer(results);
_toggleCommitButton(results);
});
function undoLastLocalCommit() {
Svn.undoLastLocalCommit()
.catch(function (err) {
ErrorHandler.showError(err, "Impossible to undo last commit");
})
.finally(function () {
refresh();
});
}
var lastCheckOneClicked = null;
function attachDefaultTableHandlers() {
$tableContainer = $gitPanel.find(".table-container")
.off()
.on("click", ".check-one", function (e) {
e.stopPropagation();
var $tr = $(this).closest("tr"),
file = $tr.attr("x-file"),
status = $tr.attr("x-status"),
isChecked = $(this).is(":checked");
if (e.shiftKey) {
// do something if we press shift. Right now? Nothing.
}
lastCheckOneClicked = file;
})
.on("dblclick", ".check-one", function (e) {
e.stopPropagation();
})
.on("click", ".btn-git-diff", function (e) {
e.stopPropagation();
handleGitDiff($(e.target).closest("tr").attr("x-file"));
})
.on("click", ".btn-git-undo", function (e) {
e.stopPropagation();
handleGitUndo($(e.target).closest("tr").attr("x-file"));
})
.on("click", ".btn-git-delete", function (e) {
e.stopPropagation();
handleGitDelete($(e.target).closest("tr").attr("x-file"));
})
.on("click", ".btn-svn-add", function(e) {
e.stopPropagation();
handleSvnAdd($(e.target).closest("tr").attr("x-file"));
})
.on("click", ".btn-svn-update", function (e) {
e.stopPropagation();
handleSvnUpdate([$(e.target).closest("tr").attr("x-file")]);
})
.on("click", ".modified-file", function (e) {
var $this = $(e.currentTarget);
if ($this.attr("x-status") === Svn.FILE_STATUS.DELETED) {
return;
}
CommandManager.execute(Commands.FILE_OPEN, {
fullPath: Utils.getProjectRoot() + $this.attr("x-file")
});
})
.on("dblclick", ".modified-file", function (e) {
var $this = $(e.currentTarget);
if ($this.attr("x-status") === Svn.FILE_STATUS.DELETED) {
return;
}
FileViewController.addToWorkingSetAndSelect(Utils.getProjectRoot() + $this.attr("x-file"));
});
}
function discardAllChanges() {
return Utils.askQuestion(Strings.RESET_LOCAL_REPO, Strings.RESET_LOCAL_REPO_CONFIRM, { booleanResponse: true })
.then(function (response) {
if (response) {
return Svn.discardAllChanges().catch(function (err) {
ErrorHandler.showError(err, "Reset of local repository failed");
}).then(function () {
refresh();
});
}
});
}
function init() {
// Add panel
var panelHtml = Mustache.render(svnPanelTemplate, {
enableAdvancedFeatures: Preferences.get("enableAdvancedFeatures"),
showBashButton: Preferences.get("showBashButton"),
showReportBugButton: Preferences.get("showReportBugButton"),
S: Strings
});
var $panelHtml = $(panelHtml);
$panelHtml.find(".git-available, .git-not-available").hide();
gitPanel = PanelManager.createBottomPanel("brackets-git.panel", $panelHtml, 100);
$gitPanel = gitPanel.$panel;
$gitPanel
.on("click", ".close", toggle)
.on("click", ".check-all", function () {
$('.check-one').attr('checked',true);
})
.on("click", ".git-refresh", EventEmitter.emitFactory(Events.REFRESH_ALL))
.on("click", ".git-commit", EventEmitter.emitFactory(Events.HANDLE_GIT_COMMIT))
.on("click", ".git-commit-merge", commitMerge)
.on("click", ".svn-update", handleGlobalUpdate)
.on("click", ".git-find-conflicts", findConflicts)
.on("click", ".git-prev-gutter", GutterManager.goToPrev)
.on("click", ".git-next-gutter", GutterManager.goToNext)
.on("click", ".git-toggle-untracked", handleToggleUntracked)
.on("click", ".authors-selection", handleAuthorsSelection)
.on("click", ".authors-file", handleAuthorsFile)
.on("click", ".git-file-history", EventEmitter.emitFactory(Events.HISTORY_SHOW, "FILE"))
.on("click", ".git-history-toggle", EventEmitter.emitFactory(Events.HISTORY_SHOW, "GLOBAL"))
.on("click", ".git-bug", ErrorHandler.reportBug)
.on("click", ".git-settings", SettingsDialog.show)
.on("contextmenu", "tr", function (e) {
var $this = $(this);
if ($this.hasClass("history-commit")) { return; }
$this.click();
setTimeout(function () {
Menus.getContextMenu("git-panel-context-menu").open(e);
}, 1);
})
.on("click", ".git-bash", EventEmitter.emitFactory(Events.TERMINAL_OPEN))
.on("click", ".reset-all", discardAllChanges);
// Attaching table handlers
attachDefaultTableHandlers();
// Commit current and all shortcuts
var COMMIT_CURRENT_CMD = "brackets-git.commitCurrent",
COMMIT_ALL_CMD = "brackets-git.commitAll",
BASH_CMD = "brackets-git.launchBash",
PUSH_CMD = "brackets-git.push",
PULL_CMD = "brackets-git.pull",
GOTO_PREV_CHANGE = "brackets-git.gotoPrevChange",
GOTO_NEXT_CHANGE = "brackets-git.gotoNextChange";
// Add command to menu.
// Register command for opening bottom panel.
CommandManager.register(Strings.PANEL_COMMAND, PANEL_COMMAND_ID, toggle);
KeyBindingManager.addBinding(PANEL_COMMAND_ID, Preferences.get("panelShortcut"));
CommandManager.register(Strings.COMMIT_CURRENT_SHORTCUT, COMMIT_CURRENT_CMD, commitCurrentFile);
KeyBindingManager.addBinding(COMMIT_CURRENT_CMD, Preferences.get("commitCurrentShortcut"));
CommandManager.register(Strings.COMMIT_ALL_SHORTCUT, COMMIT_ALL_CMD, commitAllFiles);
KeyBindingManager.addBinding(COMMIT_ALL_CMD, Preferences.get("commitAllShortcut"));
CommandManager.register(Strings.LAUNCH_BASH_SHORTCUT, BASH_CMD, EventEmitter.emitFactory(Events.TERMINAL_OPEN));
KeyBindingManager.addBinding(BASH_CMD, Preferences.get("bashShortcut"));
CommandManager.register(Strings.PUSH_SHORTCUT, PUSH_CMD, EventEmitter.emitFactory(Events.HANDLE_PUSH));
KeyBindingManager.addBinding(PUSH_CMD, Preferences.get("pushShortcut"));
CommandManager.register(Strings.PULL_SHORTCUT, PULL_CMD, EventEmitter.emitFactory(Events.HANDLE_PULL));
KeyBindingManager.addBinding(PULL_CMD, Preferences.get("pullShortcut"));
CommandManager.register(Strings.GOTO_PREVIOUS_GIT_CHANGE, GOTO_PREV_CHANGE, GutterManager.goToPrev);
KeyBindingManager.addBinding(GOTO_PREV_CHANGE, Preferences.get("gotoPrevChangeShortcut"));
CommandManager.register(Strings.GOTO_NEXT_GIT_CHANGE, GOTO_NEXT_CHANGE, GutterManager.goToNext);
KeyBindingManager.addBinding(GOTO_NEXT_CHANGE, Preferences.get("gotoNextChangeShortcut"));
// Init moment - use the correct language
moment.lang(brackets.getLocale());
if(Svn.isWorkingCopy()){
enable();
}
// Show gitPanel when appropriate
if (Preferences.get("panelEnabled")) {
toggle(true);
}
}
function enable() {
EventEmitter.emit(Events.SVN_ENABLED);
// this function is called after every Branch.refresh
gitPanelMode = null;
//
$gitPanel.find(".git-available").show();
$gitPanel.find(".git-not-available").hide();
//
Main.$icon.removeClass("warning").removeAttr("title");
gitPanelDisabled = false;
// after all is enabled
refresh();
}
function disable(cause) {
EventEmitter.emit(Events.GIT_DISABLED, cause);
gitPanelMode = cause;
// causes: not-repo
if (gitPanelMode === "not-repo") {
$gitPanel.find(".git-available").hide();
$gitPanel.find(".git-not-available").show();
} else {
Main.$icon.addClass("warning").attr("title", cause);
toggle(false);
gitPanelDisabled = true;
}
refresh();
}
// Event listeners
EventEmitter.on(Events.BRACKETS_CURRENT_DOCUMENT_CHANGE, function () {
if (!gitPanel) { return; }
refreshCurrentFile();
});
EventEmitter.on(Events.BRACKETS_DOCUMENT_SAVED, function () {
if (!gitPanel) { return; }
refresh();
});
EventEmitter.on(Events.REBASE_MERGE_MODE, function (rebaseEnabled, mergeEnabled) {
$gitPanel.find(".git-rebase").toggle(rebaseEnabled);
$gitPanel.find(".git-merge").toggle(mergeEnabled);
$gitPanel.find("button.git-commit").toggle(!rebaseEnabled && !mergeEnabled);
});
EventEmitter.on(Events.HANDLE_GIT_COMMIT, function () {
handleGitCommit();
});
exports.init = init;
exports.refresh = refresh;
exports.toggle = toggle;
exports.enable = enable;
exports.disable = disable;
exports.getPanel = function () { return $gitPanel; };
});<|fim▁end|>
|
$editedList.remove();
|
<|file_name|>issue79.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#
# Reporter: pixdamix
#
# What steps will reproduce the problem?
# ======================================
#
# 1. Given three packages A, B, C
# A depends on B
# A failed postinst and is in unpacked state
# C depends on B<|fim▁hole|># 2. Upgrade to a new version of C which does not depend on B anymore, and use
# --autoremove
#
#
# What is the expected output? What do you see instead?
# =====================================================
#
# B should not be removed, but opkg uninstalls it.
#
#
# Status
# ======
#
# Fixed in r625.
import os
import opk, cfg, opkgcl
opk.regress_init()
o = opk.OpkGroup()
o.add(Package="a", Version="1.0", Depends="b")
o.add(Package="b", Version="1.0")
o.add(Package="c", Version="1.0", Depends="b")
o.write_opk()
o.write_list()
opkgcl.update()
opkgcl.install("a")
opkgcl.install("c")
opkgcl.flag_unpacked("a")
o = opk.OpkGroup()
o.add(Package="a", Version="1.0", Depends="b")
o.add(Package="b", Version="1.0")
o.add(Package="c", Version="2.0")
o.write_opk()
o.write_list()
opkgcl.update()
opkgcl.upgrade("--autoremove")
if not opkgcl.is_installed("b", "1.0"):
opk.fail("b has been removed even though a still depends on it")<|fim▁end|>
|
#
|
<|file_name|>editor.spec.js<|end_file_name|><|fim▁begin|>describe('Controller: EditorCtrl', function () {
var $rootScope, $scope, $controller;
beforeEach( module( 'PMTViewer' ) );
<|fim▁hole|>
$controller('EditorCtrl', { '$rootScope' : $rootScope, '$scope': $scope });
}));
it('should have a defined title', function () {
expect($scope.page.title).toBeDefined();
});
it('should have a defined sub-title', function () {
expect($scope.page.subtitle).toBeDefined();
});
});<|fim▁end|>
|
beforeEach(inject(function (_$rootScope_, _$controller_) {
$rootScope = _$rootScope_;
$scope = $rootScope.$new();
$controller = _$controller_;
|
<|file_name|>toodle_sync.py<|end_file_name|><|fim▁begin|>'''
Simple pull of account info
'''
import requests
import datetime
import pickle
import json
import time
import sys
account_url = 'https://api.toodledo.com/3/account/get.php?access_token='
tasks_get_url = 'https://api.toodledo.com/3/tasks/get.php?access_token='
'''
Fields you can use to filter when you get tasks:
https://api.toodledo.com/3/tasks/index.php under "Task Datatypes"
'''
def load_token(token):
token = pickle.load( open(token, 'rb'))
return token
def sync(token):
token = load_token(token)
get_account = requests.get('{}{}'.format(account_url, token['access_token']))
#cur_task = int(get_account.text['lastedit_task'])
return get_account.text
def query_tasks(token, days, completion_state='1', fields='tag,context,goal'):
token = load_token(token)
# Get Tasks from Monday (i.e. 4 days ago, since we cron for Friday)
start_date = datetime.date.today() - datetime.timedelta(days=days)
# Make it Epoch Time
start_date = int(time.mktime(start_date.timetuple()))
start_date = str(start_date)
# Get ALL tasks from start_date
# Comp codes -- 1 == completed, 0 == incomplete, -1 == both
get_tasks = requests.get('{}{}&after={}&comp={}&fields={}'.format(tasks_get_url, token['access_token'], start_date, completion_state, fields))
pickle.dump(get_tasks.text, open('tasks_queried.pkl', 'wb'))
return get_tasks.text
def parse_to_json(response):
data = pickle.load(open(response, 'rb'))
return json.loads(data)
def arrange_date(epoch_time):
completion = time.strftime('%A, %b %d, %Y', time.gmtime(epoch_time))
return completion
def display_tasks(task_dump, context_pickle, days=4):
task_dump = parse_to_json(task_dump)
contexts = make_context_hash(context_pickle)
start_date = datetime.date.today() - datetime.timedelta(days=days)<|fim▁hole|> end_date = datetime.date.today()
end_date = datetime.date.strftime(end_date, '%A, %b %d, %Y')
print 'Tasks Created between {} and {}.'.format(start_date, end_date)
print 'Total Tasks: ', task_dump[0]['total']
for i in range(len(task_dump)):
#print task_dump[i]
# print contexts
if 'completed' in task_dump[i]:
if task_dump[i]['completed'] == 0:
print 'Incomplete Task: {}'.format(task_dump[i]['title'])
elif contexts[task_dump[i]['context']] != 'Standing Meeting':
comp_date = arrange_date(task_dump[i]['completed'])
print 'Completed Task : {}, Completed {}'.format(task_dump[i]['title'], comp_date)
else:
pass
#test = display_tasks('tasks_queried.pkl', 4)
def format_task(task):
'''
Take a dictionary-formatted task from display_tasks and print it
out in a human-readable form.
'''
comp_date = arrange_date(task['completed'])
print 'Completed Task : {}, Completed {}'.format(task['title'], comp_date)
def get_completed_tasks():
query = query_tasks('auth_token.pkl', 4, '1')
return query
def get_incomplete_tasks():
query = query_tasks('auth_token.pkl', 4, '0')
return query
def get_all_tasks():
query = query_tasks('auth_token.pkl', 4, '-1')
return query
def get_defined_list_ids(token, defined_list):
valid_lists = ['goals', 'contexts']
if defined_list.lower() not in valid_lists:
print 'Not a valid user defined list, exiting...'
sys.exit(2)
token = load_token(token)
query = requests.get('http://api.toodledo.com/3/{}/get.php?access_token={}'.format(defined_list, token['access_token']))
pickle.dump(query.text, open('{}_queried.pkl'.format(defined_list), 'wb'))
return query.text
def make_context_hash(defined_list_pickle):
contexts = pickle.load( open(defined_list_pickle, 'rb'))
contexts = json.loads(contexts)
out = {}
for i in range(len(contexts)):
out[contexts[i]['id']] = contexts[i]['name']
return out
#tasks = get_completed_tasks()
#print tasks
if __name__ == '__main__':
tdump = display_tasks('tasks_queried.pkl', 4)<|fim▁end|>
|
start_date = datetime.date.strftime(start_date, '%A, %b %d, %Y')
|
<|file_name|>prefs.py<|end_file_name|><|fim▁begin|>import os, socket, sys, urllib
from wx.lib.embeddedimage import PyEmbeddedImage
ldc_name = "Live Debian Creator"
ldc_cli_version = "1.4.0"
ldc_gui_version = "1.11.0"
if (sys.platform == "win32"):
slash = "\\"
if os.path.isfile(sys.path[0]): #fix for compiled binaries
homepath = os.path.dirname(sys.path[0]) + slash
else:
homepath = sys.path[0] + slash
else:
slash = "/"
#socket.setdefaulttimeout(10)
def defineBrowserAgent(uiname, uiversion):
class AppURLopener(urllib.FancyURLopener):
version = "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1)"
#version = uiname + " " + uiversion + " / " + sys.platform
urllib._urlopener = AppURLopener()
bookico = PyEmbeddedImage(
"iVBORw0KGgoAAAANSUhEUgAAACAAAAAgCAYAAABzenr0AAAABHNCSVQICAgIfAhkiAAABPZJ"
"REFUWIWtl09sFFUcxz/iYN+YTZyNxewiTWiV6FZQtqhkN3pgGw6UW6unJVxEDtaThJMc9WLg"
"oj1hwJhANURqQkw9NGwTla0c6JqArELSMVCzi63uKJX5UR7Bw8x0Z2d3yxJ5yctM3vu93/f7"
"+ztv4CGPqamp9A/nL2Q6lTceBqht26pw7kL+7K+10S/tJ9OpBBPASCdnH/k/wFNTU+nzc/+M"
"2v925a2N21Sq1yKJg/wxV7XWyIHBnYPjD53A9PS0mrv+e/6yw6gT60+72iK7AVJJSBoCBihD"
"AVC6WK7O3bx3+thFyY30ycSH7+w5FNXXcQgymUzaei49+vHMX/kq/SqpYGiDRbYHlBFoigMu"
"gklxHsZ1NlG4yygvKiruWauV3vsS2L59e+qZVwfHqsnB3G8LkI2ZHHzdImGBaZi+BgVaqIhi"
"sqo4uQBlrQDPI2jx5gMQUFu39A3veW3ru9leMmO19aQ2JDm8C5SCuDJBgUJRM6DkKE5WFYUF"
"cLSAxgOnNeiqBHZt6z2wO2UdSvXGrfimFNYrIzhHbca/LlOcTzL0coJsj8IRKC4pJhfAXvKB"
"dKBFQu+AdjsnsG/AOpzc+RZWKkc8FgcFGDYApas1SgtAUjxXJOK+a1XUgRHrzc4JlMslqB5C"
"ZYbg+Sws2rAYByPlSQcntNQtNSLaNGCoxv07HRJAQ63ioM6MI2fGPdt6DngKDbVK1kS9IKBV"
"PQmN6P4qBNAgGlw/jqJp9vKKBtVILrA4nA+GegAPBCT8Z0P6RF0dvAfgwdRRIu2rYfU+sLKr"
"mtcCq3UIPGyABmupzIBRoOIkuXzF7oyACq2KDne5FmQC2fC+UyWtZxmIlchtseg1sti2yzf2"
"z8n8559kdmzbYW/evLnalgAGmLr+Lp00aw3WYomUUaDfKpNJphmIDWEZXvd1N9m80HNj+Fs5"
"Pvx0TY0AE6sQUGB45SOA0m0kwyWnHfLdh8nGd5NJDGMqEwyXoi5QXJrAltmVsNxabq2mrWVi"
"qHoitkpCBJwKp6uTVDbaVGKziK5wWWaQoAOGu2IbO5pGkLfuKocD5WrJwVRQXirjXC+DAdY6"
"1ZSYCng8cnxNk8K1fukF/eA+FqAFpIaiMT0VXgIr5fcohUfosca23EzgTh3cDep5taFdcCN1"<|fim▁hole|> "/33b/kXY94VD/KWPjvY9lduVvaWxCVzYYipxW1eKFhwRajcdat9RemP+vd2jbx6cCIt19Gf0"
"6fETw28fKR6jf9Ci24LuuFeuMWC2IIlLXxVl70+5ZDckuxWuFuIxqIjgTDOjzvV9UC7OTbbS"
"3fGvmW3bauyzE/nCFXe4dIMsy45tVX889oT+83RXV5d5bf21MXIyZD3re2WGgnyfOFK9VG0J"
"/MAEOhmnTp1KXF28mlsXWzezf+/+1legyPgPTicVRBS2XfsAAAAASUVORK5CYII=")
getbookicoIcon = bookico.GetIcon<|fim▁end|>
|
"bviAMTB98OZqakfAH65vx4rqKBlNm2+8grUeWGCrGW5S9yWwti7ofW5Ucx9rIBK6bIRB2lVN"
"Y29tQcBonG4Ta6k/NSBeDkSH2Sp0GoiUYYsQ+AB+0rTt4hov/lpQ0lrKDT/F66y3IjLN9rmh"
"VQVo1b4StHgkWhAIEjioKBFfx91GFzR5wJ5HRINpem3YQfzyklAihgCjxDT1SvLvLLLkR0rA"
"jdzOmjxwotbVf656+/20YmS9wrIfvSdO8p53A0UAM0RihVqIjNSB/WXRIFpwXVhebgxCkwdu"
|
<|file_name|>ca.js<|end_file_name|><|fim▁begin|>/*
Copyright (c) 2003-2014, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
<|fim▁hole|> preview: 'Visualització prèvia'
} );<|fim▁end|>
|
*/
CKEDITOR.plugins.setLang( 'preview', 'ca', {
|
<|file_name|>article_detail.js<|end_file_name|><|fim▁begin|>var angularjs = angular.module('articleDetailModule', ['courseTagServiceModule', 'ngCookies', 'ngVideo']);
angularjs.controller('ArticleDetailController', ['$rootScope', '$scope',
'$http', '$stateParams', '$state', '$location',
'CourseTagService', '$sce', '$cookies', '$httpParamSerializer', 'video', '$route',
function($rootScope, $scope, $http, $stateParams,
$state, $location, courseTagSrv, $sce, $cookies, $httpParamSerializer,
video, $route) {
if ($stateParams.courseId === undefined) {
$state.go('home');
}
var token = $location.search().token;
if (token !== undefined) {
console.log('set token on cookie');
$cookies.put('access_token', token);
}
$scope.showShare = false;
$scope.shareImg = "img/share_400_400_2.png";
$scope.courseUrl = $location.absUrl();
console.log('location=', $scope.courseUrl);
var util = new DomainNameUtil($location);
$scope.originUrl = window.location.href;
console.log('get access token:', $cookies.get('access_token'));
$scope.favoriteCls = 'fontawesome-heart-empty';
$scope.favoriteText = '收藏';
$http.get(util.getBackendServiceUrl() +
'/course/proposal/' + $stateParams.courseId, {
headers: {
'access_token': $cookies.get('access_token')
}
}).
success(function(e) {
console.log('get course ', e);
$scope.course = e;
$rootScope.title = e.name;
var $body = $('body');
var $iframe = $('<iframe src="/favicon.ico"></iframe>');
$iframe.on('load', function() {
setTimeout(function() {
$iframe.off('load').remove();
}, 0);
}).appendTo($body);
$scope.course.videoUrl = $sce.trustAsResourceUrl($scope.course.videoUrl);
document.getElementById('article_content').innerHTML = $scope.course.content;
// video.addSource('mp4',$scope.course.videoUrl);
setFavoriteDom();
configJSAPI();
}).error(function(e) {
});
$http.get(util.getBackendServiceUrl() +
'/course/proposal/query?number=3&ignore_course_id=' + $stateParams.courseId)
.success(function(e) {
console.log('get related courses ', e);
$scope.relatedCourses = e;
}).error(function(e) {
});
courseTagSrv.getCourseTags().then(function(e) {
$scope.courseTags = e;
});
$scope.background = function(course) {
return {
'background-image': 'url(' + course.titleImageUrl + ')',
'background-size': '100%'
};
}
$scope.goToCourseTag = function(tag, $event) {
console.log('go to course tag');
$state.go('course_tags', {
courseTagId: tag.id,
courseName: tag.name
});
$event.stopPropagation();
}
$scope.share = function() {
console.log('share');
$scope.showShare = true;
// var ret = recordShareFavorite('SHARE');
// ret.success(function(e){
// });
}
$scope.favorite = function() {
console.log('favorite');
if ($cookies.get('access_token') === undefined) {
var redirect = encodeURI($scope.courseUrl).replace('#', '%23');
console.log('redirect=', encodeURI($scope.courseUrl).replace('#', '%23'));
window.location.href = 'https://open.weixin.qq.com/connect/oauth2/authorize?appid=wxfe34c2ab5b5c5813&redirect_uri=http%3a%2f%2fwww.imzao.com%2feducation%2fzaozao%2fwechat%2flogin&response_type=code&scope=snsapi_userinfo&state=WECHAT_SERVICE-' + redirect + '#wechat_redirect';
return;
}
var promise = recordShareFavorite('FAVORITE');
promise.success(function(e) {
console.log('favorite success ', e);
$scope.course.favorited = !$scope.course.favorited;
setFavoriteDom();
}).error(function(e) {
console.log('share failed');
});
}
function setFavoriteDom() {
if ($scope.course.favorited === true) {
$scope.favoriteCls = 'fontawesome-heart';
$scope.favoriteText = '已收藏';
} else {
$scope.favoriteCls = 'fontawesome-heart-empty';
$scope.favoriteText = '收藏';
}
}
$scope.hideShare = function() {
$scope.showShare = false;
}
$scope.showPlayButton = true;
$scope.showVideo = false;
$scope.playVideo = function(e) {
console.log('course video,', $("#course_video"));
$("#course_video")[0].play();
}
document.getElementById('course_video').addEventListener('webkitendfullscreen', function(e) {
// handle end full screen
console.log('webkitendfullscreen');
$scope.showVideo = false;
$scope.showPlayButton = true;
$scope.$apply();
});
document.getElementById('course_video').addEventListener('webkitenterfullscreen', function(e) {
// handle end full screen
console.log('webkitenterfullscreen');
$scope.showVideo = true;
$scope.$apply();
});
// $scope.videoEnded = function(e) {
// console.log('video ended ');
// $scope.showPlayButton = true;
// }
// $scope.videoPaused = function(e) {
// console.log('video paused ');
// $scope.showPlayButton = true;
// }
function configJSAPI() {
console.log('js api config:', $scope.courseUrl);
$http.get(util.getBackendServiceUrl() + '/wechat/jsapi?url=' + $scope.courseUrl.split('#')[0].replace('&', '%26'))
.success(function(e) {
console.log(e);
var signature = e;
wx.config({
debug: false,<|fim▁hole|> appId: e.appid,
timestamp: e.timestamp,
nonceStr: e.noncestr,
signature: e.signature,
jsApiList: ['checkJsApi', 'onMenuShareTimeline', 'onMenuShareAppMessage']
});
wx.ready(function() {
console.log('wx ready');
});
wx.error(function(res) {
console.log('wx error');
});
wx.onMenuShareTimeline({
title: $scope.course.name,
link: $scope.courseUrl,
imgUrl: encodeURI($scope.course.titleImageUrl),
success: function() {
console.log('share success');
scope.showShare = false;
recordShareFavorite('SHARE');
},
cancel: function() {
console.log('cancel share');
scope.showShare = false;
}
});
var shareDesc = '';
console.log('share desc:', $scope.course.introduction);
if ($scope.course.introduction !== null && $scope.course.introduction !== 'undefined') {
shareDesc = $scope.course.introduction;
}
wx.onMenuShareAppMessage({
title: $scope.course.name, // share title
desc: shareDesc, // share description
link: $scope.courseUrl, // share link
imgUrl: encodeURI($scope.course.titleImageUrl), // share icon
// Share type: music, video, or link; defaults to link if left empty
// If type is music or video, a data link must be provided; defaults to empty
success: function(res) {
// Callback executed after the user confirms sharing
console.log('share success');
recordShareFavorite('SHARE');
scope.showShare = false;
},
cancel: function(res) {
// Callback executed after the user cancels sharing
console.log('cancel share');
scope.showShare = false;
},
fail: function(res) {
}
});
}).error(function(e) {
});
}
function recordShareFavorite(activity) {
var link = util.getBackendServiceUrl() + '/course/interactive';
var req = {
method: 'POST',
url: link,
headers: {
'Content-Type': 'application/x-www-form-urlencoded;charset=UTF-8',
'access_token': $cookies.get('access_token')
//'Content-Type': 'multipart/form-data; charset=utf-8;'
},
data: $httpParamSerializer({
course_id: $scope.course.id,
flag: activity
})
};
return $http(req);
}
}
]);
angularjs.directive('videoLoader', function() {
return function(scope, element, attrs) {
scope.$watch(attrs.videoLoader, function() {
console.log('element:', element);
$("#course_video").bind('ended', function() {
console.log('video ended.');
// element.removeAttr('controls');
scope.showPlayButton = true;
scope.showVideo = false;
scope.$apply();
// $(this).unbind('ended');
// if (!this.hasPlayed) {
// return;
// }
});
$("#course_video").bind('pause', function() {
console.log('video paused.');
scope.showPlayButton = false;
scope.showVideo = true;
// element.attr('controls',true);
scope.$apply();
// $(this).unbind('paused');
// if (!this.hasPlayed) {
// return;
// }
});
$("#course_video").bind('play', function() {
console.log('video played.');
scope.showPlayButton = false;
scope.showVideo = true;
// element.attr('controls',true);
scope.$apply();
// $(this).unbind('played');
// if (!this.hasPlayed) {
// return;
// }
});
$("#course_video").bind('webkitfullscreenchange mozfullscreenchange fullscreenchange',
function(event) {
console.log('full screen ', event);
var state = document.fullscreenElement ||
document.webkitFullscreenElement ||
document.mozFullScreenElement ||
document.msFullscreenElement;
if (state !== undefined) {
scope.showVideo = true;
} else {
scope.showVideo = false;
}
scope.$apply();
});
});
}
});<|fim▁end|>
| |
<|file_name|>agg_run.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (C) 2015 Swift Navigation Inc.
# Contact: Ian Horn <[email protected]>
#
# This source is subject to the license found in the file 'LICENSE' which must
# be distributed together with this source. All other rights reserved.
#
# THIS CODE AND INFORMATION IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND,
# EITHER EXPRESSED OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
from gnss_analysis.runner import run as single_run
import pandas as pd
import numpy as np
def main():
import argparse
parser = argparse.ArgumentParser(description='RTK Filter SITL tests.')
parser.add_argument('infile', help='Specify the HDF5 file to use for input.')
parser.add_argument('outfile', help='Specify the HDF5 file to output into.')
parser.add_argument('baselineX', help='The baseline north component.')
parser.add_argument('baselineY', help='The baseline east component.')
parser.add_argument('baselineZ', help='The baseline down component.')
parser.add_argument('--NED', action='store_true')
parser.add_argument('-k', '--key',
default='table', nargs=1,
help='The key for the output table to insert into.')
parser.add_argument('-r', '--row',
default=None, nargs=1,
help='The key for the output table to insert into.')
args = parser.parse_args()
hdf5_filename_in = args.infile
hdf5_filename_out = args.outfile
baselineX = args.baselineX
baselineY = args.baselineY
baselineZ = args.baselineZ
baseline = np.array(map(float, [baselineX, baselineY, baselineZ]))
out_key = args.key<|fim▁hole|> row = args.row
if row is None:
row = hdf5_filename_in
reports = single_run(hdf5_filename_in, baseline, baseline_is_NED=args.NED)
out_store = pd.HDFStore(hdf5_filename_out)
if ('/' + out_key) in out_store.keys():
out_df = out_store[out_key]
else:
out_df = pd.DataFrame()
new_cols = [col for col in reports.keys() if col not in out_df.columns]
for new_col in new_cols:
out_df[new_col] = pd.Series(np.nan * np.empty_like(out_df.index),
index=out_df.index)
out_df.loc[row] = pd.Series(reports)
out_store[out_key] = out_df
out_store.close()
if __name__ == "__main__":
main()<|fim▁end|>
| |
<|file_name|>markdown.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use collections::HashSet;
use std::{str, io};
use std::strbuf::StrBuf;
use getopts;
use testing;
use html::escape::Escape;
use html::markdown::{MarkdownWithToc, find_testable_code, reset_headers};
use test::Collector;
fn load_string(input: &Path) -> io::IoResult<Option<~str>> {
let mut f = try!(io::File::open(input));
let d = try!(f.read_to_end());
Ok(str::from_utf8(d.as_slice()).map(|s| s.to_owned()))
}
macro_rules! load_or_return {
($input: expr, $cant_read: expr, $not_utf8: expr) => {
{<|fim▁hole|> let _ = writeln!(&mut io::stderr(),
"error reading `{}`: {}", input.display(), e);
return $cant_read;
}
Ok(None) => {
let _ = writeln!(&mut io::stderr(),
"error reading `{}`: not UTF-8", input.display());
return $not_utf8;
}
Ok(Some(s)) => s
}
}
}
}
/// Separate any lines at the start of the file that begin with `%`.
fn extract_leading_metadata<'a>(s: &'a str) -> (Vec<&'a str>, &'a str) {
let mut metadata = Vec::new();
for line in s.lines() {
if line.starts_with("%") {
// remove %<whitespace>
metadata.push(line.slice_from(1).trim_left())
} else {
let line_start_byte = s.subslice_offset(line);
return (metadata, s.slice_from(line_start_byte));
}
}
// if we're here, then all lines were metadata % lines.
(metadata, "")
}
fn load_external_files(names: &[~str]) -> Option<~str> {
let mut out = StrBuf::new();
for name in names.iter() {
out.push_str(load_or_return!(name.as_slice(), None, None));
out.push_char('\n');
}
Some(out.into_owned())
}
/// Render `input` (e.g. "foo.md") into an HTML file in `output`
/// (e.g. output = "bar" => "bar/foo.html").
pub fn render(input: &str, mut output: Path, matches: &getopts::Matches) -> int {
let input_p = Path::new(input);
output.push(input_p.filestem().unwrap());
output.set_extension("html");
let mut css = StrBuf::new();
for name in matches.opt_strs("markdown-css").iter() {
let s = format!("<link rel=\"stylesheet\" type=\"text/css\" href=\"{}\">\n", name);
css.push_str(s)
}
let input_str = load_or_return!(input, 1, 2);
let (in_header, before_content, after_content) =
match (load_external_files(matches.opt_strs("markdown-in-header")
.as_slice()),
load_external_files(matches.opt_strs("markdown-before-content")
.as_slice()),
load_external_files(matches.opt_strs("markdown-after-content")
.as_slice())) {
(Some(a), Some(b), Some(c)) => (a,b,c),
_ => return 3
};
let mut out = match io::File::create(&output) {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error opening `{}` for writing: {}",
output.display(), e);
return 4;
}
Ok(f) => f
};
let (metadata, text) = extract_leading_metadata(input_str);
if metadata.len() == 0 {
let _ = writeln!(&mut io::stderr(),
"invalid markdown file: expecting initial line with `% ...TITLE...`");
return 5;
}
let title = metadata.get(0).as_slice();
reset_headers();
let err = write!(
&mut out,
r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="generator" content="rustdoc">
<title>{title}</title>
{css}
{in_header}
</head>
<body>
<!--[if lte IE 8]>
<div class="warning">
This old browser is unsupported and will most likely display funky
things.
</div>
<![endif]-->
{before_content}
<h1 class="title">{title}</h1>
{text}
{after_content}
</body>
</html>"#,
title = Escape(title),
css = css,
in_header = in_header,
before_content = before_content,
text = MarkdownWithToc(text),
after_content = after_content);
match err {
Err(e) => {
let _ = writeln!(&mut io::stderr(),
"error writing to `{}`: {}",
output.display(), e);
6
}
Ok(_) => 0
}
}
/// Run any tests/code examples in the markdown file `input`.
pub fn test(input: &str, libs: HashSet<Path>, mut test_args: Vec<~str>) -> int {
let input_str = load_or_return!(input, 1, 2);
let mut collector = Collector::new(input.to_owned(), libs, true, true);
find_testable_code(input_str, &mut collector);
test_args.unshift(~"rustdoctest");
testing::test_main(test_args.as_slice(), collector.tests);
0
}<|fim▁end|>
|
let input = Path::new($input);
match load_string(&input) {
Err(e) => {
|
<|file_name|>ssa-sra-1.C<|end_file_name|><|fim▁begin|>/* { dg-do compile } */
/* { dg-options "-O1 -fdump-tree-optimized" } */
void link_error();
<|fim▁hole|> inline State(){p0=0;p1=0;p2=0;}
inline State(const State &s) {
p0 = s.p0;
p1 = s.p1;
p2 = s.p2;
}
inline void operator =(const State &s) {
p0 = s.p0;
p1 = s.p1;
p2 = s.p2;
}
inline void step(void) {
p0 = p1+p2;
p1 = p0*p1+p2;
p2 = p0-p2;
}
};
inline void iterate_ok(State &inS1, State &inS2, unsigned int n)
{
State s1 = inS1;
for (unsigned int i = 0; i < n; i++) {
s1.step();
}
inS1 = s1;
}
void temp()
{
State s1;
s1.p0 = 0;
s1.p1 = 0;
s1.p2 = 0;
State s2;
s2.p0 = 0;
s2.p1 = 0;
s2.p2 = 0;
iterate_ok (s1, s2, 1);
if (s1.p0)
link_error();
if (s1.p0)
link_error();
if (s1.p0)
link_error();
}
/* We should have removed the casts from pointers to references and caused SRA to happen. */
/* { dg-final { scan-tree-dump-times "link_error" 0 "optimized"} } */
/* { dg-final { cleanup-tree-dump "optimized" } } */<|fim▁end|>
|
struct State {
int p0, p1, p2;
|
<|file_name|>testSpec.js<|end_file_name|><|fim▁begin|>define(['Scripts/App/CommentsScraper'], function(CommentsScraper) {
describe('myFilter', function() {
var failTest = function(error) {
expect(error).toBeUndefined();
};
beforeEach(function(){
jasmine.addMatchers({
toBeEqualComment: function() {
return {
compare: function (actual, expected) {
return {
pass: actual.author === expected.author && actual.commentType === expected.commentType
&& actual.link === expected.link && actual.x === expected.x && actual.y === expected.y
};
}
};
}
});
jasmine.getFixtures().fixturesPath = 'base/Tests/Fixtures/';
loadFixtures('sampleComments.html');
});
it('should give correct comment object when given correct comment html', function () {
var expected = {
author: 'Vaniver',
commentType: 'Parent',
link: 'http://lesswrong.com/lw/n93/open_thread_feb_01_feb_07_2016/d2us',
x: 1454336014000,
y: 2
};
//var employee = CommentsScraper.getCommentObj($('.comment')[0].outerHTML);
//expect(employee.author).toBeEqualComment('ScottL');
});
// Our first test!!!!
it('about greeting says "This is the about message2!"', function () {
<|fim▁hole|> //console.log(CommentsScraper);
//CommentsScraper.getCommentData($('.comment')[0].outerHTML).then(function(data) {
// console.log(data);
//});
});
});
});<|fim▁end|>
| |
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>// Preprocessor Directives
#define STB_IMAGE_IMPLEMENTATION
// Local Headers
#include "glitter.hpp"
#include "shader.h"
#include "camera.h"
// Console Color
#include "consoleColor.hpp"
// System Headers
#include <glad/glad.h>
#include <GLFW/glfw3.h>
#include <glm/gtc/matrix_transform.hpp>
// Standard Headers
//#include <cstdio><|fim▁hole|>//#include <cstdlib>
#include <iostream>
// Declare input callback functions
void key_callback(GLFWwindow* window, int key, int scancode, int action, int mode);
void do_movement();
void mouse_callback(GLFWwindow* window, double xpos, double ypos);
void scroll_callback(GLFWwindow* window, double xoffset, double yoffset);
// Camera
Camera camera(glm::vec3(0.0f, 0.0f, 3.0f));
bool keys[1024];
GLfloat lastX = 400, lastY = 300;
bool firstMouse = true;
// ʱ¼äÔöÁ¿Deltatime
GLfloat deltaTime = 0.0f; // Time between current frame and last frame
GLfloat lastFrame = 0.0f; // Time of last frame
int main(int argc, char * argv[]) {
// Initialize GLFW
glfwInit();
glfwWindowHint(GLFW_CONTEXT_VERSION_MAJOR, 3);
glfwWindowHint(GLFW_CONTEXT_VERSION_MINOR, 3);
glfwWindowHint(GLFW_OPENGL_PROFILE, GLFW_OPENGL_CORE_PROFILE);
glfwWindowHint(GLFW_OPENGL_FORWARD_COMPAT, GL_TRUE); // Required for compatibility with Mac OS X
glfwWindowHint(GLFW_RESIZABLE, GL_FALSE);
// Create the window and obtain its context
GLFWwindow* window = glfwCreateWindow(mWidth, mHeight, "LearnOpenGL", nullptr, nullptr);
if (window == nullptr)
{
std::cout << "Failed to create GLFW window" << std::endl;
glfwTerminate();
return -1;
}
glfwMakeContextCurrent(window);
// Register event callbacks with GLFW
glfwSetKeyCallback(window, key_callback);
glfwSetCursorPosCallback(window, mouse_callback);
glfwSetScrollCallback(window, scroll_callback);
// Load OpenGL Functions
gladLoadGL();
// fprintf(stderr, "OpenGL %s\n", glGetString(GL_VERSION));
std::cout << BLUE << "OpenGL " << glGetString(GL_VERSION) << RESET << std::endl;
// Query the maximum number of vertex attributes supported by the GPU
// GLint nrAttributes;
// glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &nrAttributes);
// std::cout << GREEN << "Maximum nr of vertex attributes supported: " << nrAttributes << RESET << std::endl;
// Set up the viewport
int width, height;
glfwGetFramebufferSize(window, &width, &height);
glViewport(0, 0, width, height);
// Set OpenGL options
glEnable(GL_DEPTH_TEST); // Enable depth testing
glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED);
// Compile the shader program
Shader ourShader("vert.vert", "frag.frag");
// Vertex input
GLfloat vertices[] = {
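// Raw mesh vertex positions, one (x, y, z) triple per line; the repeated
// vertices suggest an unindexed triangle list (three consecutive lines per face).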
4.77E-09,1.20595,-0.03172,
-4.77E-09,1.21835,-0.03041,
0.0203,1.20699,-0.02182,
0.0203,1.20699,-0.02182,
-4.77E-09,1.21835,-0.03041,
0.01984,1.2199,-0.02031,
0.02003,1.23105,-0.02186,
0.02721,1.23366,-0.00837,
0.01984,1.2199,-0.02031,
0.01984,1.2199,-0.02031,
0.02721,1.23366,-0.00837,
0.02645,1.21863,-0.00141,
-4.77E-09,1.21835,-0.03041,
0,1.22734,-0.03069,
0.01984,1.2199,-0.02031,
0.01984,1.2199,-0.02031,
0,1.22734,-0.03069,
0.02003,1.23105,-0.02186,
0.01468,1.22309,0.01994,
-4.77E-09,1.21742,0.02325,
0.01567,1.21306,0.01439,
0.01567,1.21306,0.01439,
-4.77E-09,1.21742,0.02325,
0,1.20973,0.0205,
0,1.1706,0.01972,
0.01756,1.17823,0.0107,
-9.54E-09,1.19759,0.01869,
-9.54E-09,1.19759,0.01869,
0.01756,1.17823,0.0107,
0.01641,1.19993,0.01229,
0.01756,1.17823,0.0107,
0.02891,1.19263,-0.00685,
0.01641,1.19993,0.01229,
0.01641,1.19993,0.01229,
0.02891,1.19263,-0.00685,
0.02695,1.20501,-0.00425,
0.02695,1.20501,-0.00425,
0.02891,1.19263,-0.00685,
0.0203,1.20699,-0.02182,
0.0203,1.20699,-0.02182,
0.02891,1.19263,-0.00685,
0.02235,1.18587,-0.02614,
9.55E-09,1.1844,-0.03646,
4.77E-09,1.20595,-0.03172,
0.02235,1.18587,-0.02614,
0.02235,1.18587,-0.02614,
4.77E-09,1.20595,-0.03172,
0.0203,1.20699,-0.02182,
0.01567,1.21306,0.01439,
0,1.20973,0.0205,
0.01641,1.19993,0.01229,
0.01641,1.19993,0.01229,
0,1.20973,0.0205,
-9.54E-09,1.19759,0.01869,
0.0203,1.20699,-0.02182,
0.01984,1.2199,-0.02031,
0.02695,1.20501,-0.00425,
0.02695,1.20501,-0.00425,
0.01984,1.2199,-0.02031,
0.02645,1.21863,-0.00141,
0.02645,1.21863,-0.00141,
0.01567,1.21306,0.01439,
0.02695,1.20501,-0.00425,
0.02695,1.20501,-0.00425,
0.01567,1.21306,0.01439,
0.01641,1.19993,0.01229,
-0.01984,1.2199,-0.02031,
-4.77E-09,1.21835,-0.03041,
-0.0203,1.20699,-0.02182,
-0.0203,1.20699,-0.02182,
-4.77E-09,1.21835,-0.03041,
4.77E-09,1.20595,-0.03172,
-0.02003,1.23105,-0.02186,
-0.01984,1.2199,-0.02031,
-0.02721,1.23366,-0.00837,
-0.02721,1.23366,-0.00837,
-0.01984,1.2199,-0.02031,
-0.02645,1.21863,-0.00141,
-4.77E-09,1.21835,-0.03041,
-0.01984,1.2199,-0.02031,
0,1.22734,-0.03069,
0,1.22734,-0.03069,
-0.01984,1.2199,-0.02031,
-0.02003,1.23105,-0.02186,
0,1.20973,0.0205,
-4.77E-09,1.21742,0.02325,
-0.01567,1.21306,0.01439,
-0.01567,1.21306,0.01439,
-4.77E-09,1.21742,0.02325,
-0.01468,1.22309,0.01994,
-0.01641,1.19993,0.01229,
-0.01756,1.17823,0.0107,
-9.54E-09,1.19759,0.01869,
-9.54E-09,1.19759,0.01869,
-0.01756,1.17823,0.0107,
0,1.1706,0.01972,
-0.01756,1.17823,0.0107,
-0.01641,1.19993,0.01229,
-0.02891,1.19263,-0.00685,
-0.02891,1.19263,-0.00685,
-0.01641,1.19993,0.01229,
-0.02695,1.20501,-0.00425,
-0.02235,1.18587,-0.02614,
-0.02891,1.19263,-0.00685,
-0.0203,1.20699,-0.02182,
-0.0203,1.20699,-0.02182,
-0.02891,1.19263,-0.00685,
-0.02695,1.20501,-0.00425,
-0.0203,1.20699,-0.02182,
4.77E-09,1.20595,-0.03172,
-0.02235,1.18587,-0.02614,
-0.02235,1.18587,-0.02614,
4.77E-09,1.20595,-0.03172,
9.55E-09,1.1844,-0.03646,
-9.54E-09,1.19759,0.01869,
0,1.20973,0.0205,
-0.01641,1.19993,0.01229,
-0.01641,1.19993,0.01229,
0,1.20973,0.0205,
-0.01567,1.21306,0.01439,
-0.0203,1.20699,-0.02182,
-0.02695,1.20501,-0.00425,
-0.01984,1.2199,-0.02031,
-0.01984,1.2199,-0.02031,
-0.02695,1.20501,-0.00425,
-0.02645,1.21863,-0.00141,
-0.01641,1.19993,0.01229,
-0.01567,1.21306,0.01439,
-0.02695,1.20501,-0.00425,
-0.02695,1.20501,-0.00425,
-0.01567,1.21306,0.01439,
-0.02645,1.21863,-0.00141,
0.01567,1.21306,0.01439,
0.02503,1.23017,0.00403,
0.01468,1.22309,0.01994,
0.01567,1.21306,0.01439,
0.02645,1.21863,-0.00141,
0.02503,1.23017,0.00403,
0.02503,1.23017,0.00403,
0.02645,1.21863,-0.00141,
0.02721,1.23366,-0.00837,
-0.01567,1.21306,0.01439,
-0.01468,1.22309,0.01994,
-0.02503,1.23017,0.00403,
-0.01567,1.21306,0.01439,
-0.02503,1.23017,0.00403,
-0.02645,1.21863,-0.00141,
-0.02503,1.23017,0.00403,
-0.02721,1.23366,-0.00837,
-0.02645,1.21863,-0.00141,
0.03139,1.3603,0.07762,
0.04587,1.36008,0.07223,
0.03149,1.37552,0.0749,
0.03149,1.37552,0.0749,
0.04587,1.36008,0.07223,
0.04652,1.37396,0.06917,
0.07023,1.27958,0.03767,
0.0723,1.28971,0.038,
0.06858,1.27965,0.0514,
0.06858,1.27965,0.0514,
0.0723,1.28971,0.038,
0.07096,1.29082,0.0514,
0.01638,1.25964,0.08754,
0.01459,1.25007,0.08622,
0.02834,1.25692,0.08309,
0.02834,1.25692,0.08309,
0.01459,1.25007,0.08622,
0.02647,1.2487,0.08191,
0.0061,1.21616,0.06352,
0.0124,1.21815,0.06233,
0.00641,1.21884,0.06702,
0.00641,1.21884,0.06702,
0.0124,1.21815,0.06233,
0.01268,1.2208,0.0666,
0.01261,1.21771,0.05502,
0.0124,1.21815,0.06233,
0.00648,1.21591,0.05586,
0.00648,1.21591,0.05586,
0.0124,1.21815,0.06233,
0.0061,1.21616,0.06352,
0.05295,1.24491,0.06126,
0.05363,1.24466,0.05153,
0.05795,1.25184,0.06138,
0.05795,1.25184,0.06138,
0.05363,1.24466,0.05153,
0.0591,1.25166,0.05157,
0.02235,1.22209,0.06108,
0.02252,1.2218,0.05414,
0.03282,1.22791,0.0613,
0.03282,1.22791,0.0613,
0.02252,1.2218,0.05414,
0.0333,1.22787,0.05316,
0.02252,1.2218,0.05414,
0.02235,1.22209,0.06108,
0.01261,1.21771,0.05502,
0.01261,1.21771,0.05502,
0.02235,1.22209,0.06108,
0.0124,1.21815,0.06233,
0.02894,1.22811,0.0672,
0.03093,1.22799,0.06463,
0.0362,1.23491,0.06981,
0.0362,1.23491,0.06981,
0.03093,1.22799,0.06463,
0.03978,1.23434,0.06523,
0.05434,1.25252,0.06685,
0.04685,1.25365,0.07409,
0.05024,1.24555,0.06635,
0.05024,1.24555,0.06635,
0.04685,1.25365,0.07409,
0.04379,1.24654,0.07316,
0.0735,1.28922,0.02501,
0.0723,1.28971,0.038,
0.07095,1.27964,0.02588,
0.07095,1.27964,0.02588,
0.0723,1.28971,0.038,
0.07023,1.27958,0.03767,
0.05134,1.24564,0.01876,
0.0536,1.24468,0.02831,
0.04519,1.2403,0.02048,
0.04519,1.2403,0.02048,
0.0536,1.24468,0.02831,
0.04799,1.23945,0.02951,
0.0595,1.25156,0.0393,
0.05957,1.25153,0.02703,
0.06331,1.25887,0.03872,
0.06331,1.25887,0.03872,
0.05957,1.25153,0.02703,
0.06318,1.25883,0.02643,
0.05957,1.25153,0.02703,
0.0595,1.25156,0.0393,
0.0536,1.24468,0.02831,
0.0536,1.24468,0.02831,
0.0595,1.25156,0.0393,
0.05402,1.24445,0.04027,
0.04692,1.24657,0.00995,
0.05342,1.25298,0.00928,
0.05134,1.24564,0.01876,
0.05134,1.24564,0.01876,
0.05342,1.25298,0.00928,
0.05723,1.2522,0.01787,
0.04194,1.23381,0.04234,
0.04066,1.2338,0.03086,
0.04878,1.23893,0.04124,
0.04878,1.23893,0.04124,
0.04066,1.2338,0.03086,
0.04799,1.23945,0.02951,
0.07095,1.27964,0.02588,
0.07023,1.27958,0.03767,
0.06646,1.26648,0.02626,
0.06646,1.26648,0.02626,
0.07023,1.27958,0.03767,
0.06571,1.26606,0.03842,
0.02345,1.22239,0.04425,
0.02252,1.2218,0.05414,
0.01466,1.21907,0.0448,
0.01466,1.21907,0.0448,
0.02252,1.2218,0.05414,
0.01261,1.21771,0.05502,
0.04206,1.2795,0.07332,
0.03143,1.28117,0.07577,
0.04158,1.27434,0.07525,
0.04158,1.27434,0.07525,
0.03143,1.28117,0.07577,
0.03085,1.27622,0.0786,
0.06246,1.25878,0.0519,
0.06331,1.25887,0.03872,
0.06464,1.26573,0.05144,
0.06464,1.26573,0.05144,
0.06331,1.25887,0.03872,
0.06571,1.26606,0.03842,
0.05705,1.25905,0.06705,
0.04882,1.25987,0.07409,
0.05434,1.25252,0.06685,
0.05434,1.25252,0.06685,
0.04882,1.25987,0.07409,
0.04685,1.25365,0.07409,
0.00896,1.28062,0.0836,
0.0086,1.28722,0.08112,
0,1.28235,0.08499,
0,1.28235,0.08499,
0.0086,1.28722,0.08112,
0,1.2891,0.08302,
0.04061,1.26738,0.07744,
0.03032,1.26928,0.08152,
0.03952,1.26106,0.0782,
0.03952,1.26106,0.0782,
0.03032,1.26928,0.08152,
0.02943,1.26329,0.08302,
0.00949,1.26841,0.09069,
0.0179,1.26613,0.08729,
0.00942,1.27385,0.08785,
0.00942,1.27385,0.08785,
0.0179,1.26613,0.08729,
0.01921,1.27166,0.0853,
0.01254,1.35949,0.0839,
0.03139,1.3603,0.07762,
0.01352,1.37449,0.0805,
0.01352,1.37449,0.0805,
0.03139,1.3603,0.07762,
0.03149,1.37552,0.0749,
0.02173,1.31709,0.07831,
0.03247,1.32076,0.0765,
0.02158,1.32687,0.07898,
0.02158,1.32687,0.07898,
0.03247,1.32076,0.0765,
0.0331,1.33154,0.07653,
0.06019,1.27317,0.06641,
0.06462,1.27315,0.06051,
0.05984,1.28019,0.06589,
0.05984,1.28019,0.06589,
0.06462,1.27315,0.06051,
0.06572,1.2798,0.06003,
0.04519,1.23945,0.06554,
0.04016,1.2401,0.07158,
0.03978,1.23434,0.06523,
0.03978,1.23434,0.06523,
0.04016,1.2401,0.07158,
0.0362,1.23491,0.06981,
0.02432,1.24212,0.08008,
0.02647,1.2487,0.08191,
0.01386,1.2431,0.08354,
0.01386,1.2431,0.08354,
0.02647,1.2487,0.08191,
0.01459,1.25007,0.08622,
0.04066,1.2338,0.03086,
0.04194,1.23381,0.04234,
0.03154,1.22797,0.03229,
0.03154,1.22797,0.03229,
0.04194,1.23381,0.04234,
0.03257,1.22766,0.04363,
0.04066,1.2338,0.03086,
0.03154,1.22797,0.03229,
0.03809,1.23448,0.02291,
0.03809,1.23448,0.02291,
0.03154,1.22797,0.03229,
0.02885,1.22882,0.02475,
0.0357,1.24758,0.07746,
0.02647,1.2487,0.08191,
0.0329,1.24107,0.07576,
0.0329,1.24107,0.07576,
0.02647,1.2487,0.08191,
0.02432,1.24212,0.08008,
0.03817,1.25507,0.07805,
0.02834,1.25692,0.08309,
0.0357,1.24758,0.07746,
0.0357,1.24758,0.07746,
0.02834,1.25692,0.08309,
0.02647,1.2487,0.08191,
0.03085,1.27622,0.0786,
0.03143,1.28117,0.07577,
0.02007,1.27859,0.08134,
0.02007,1.27859,0.08134,
0.03143,1.28117,0.07577,
0.02062,1.28425,0.07855,
0.02834,1.25692,0.08309,
0.02943,1.26329,0.08302,
0.01638,1.25964,0.08754,
0.01638,1.25964,0.08754,
0.02943,1.26329,0.08302,
0.0179,1.26613,0.08729,
0.04611,1.34741,0.07345,
0.03228,1.34693,0.07827,
0.04568,1.34193,0.07366,
0.04568,1.34193,0.07366,
0.03228,1.34693,0.07827,
0.03255,1.34161,0.07752,
0.0086,1.28722,0.08112,
0.00896,1.28062,0.0836,
0.02062,1.28425,0.07855,
0.02062,1.28425,0.07855,
0.00896,1.28062,0.0836,
0.02007,1.27859,0.08134,
0.06877,1.37605,0.04344,
0.0588,1.38544,0.05528,
0.06907,1.36504,0.05375,
0.06907,1.36504,0.05375,
0.0588,1.38544,0.05528,
0.0591,1.37122,0.06388,
0.04651,1.38985,0.06161,
0.03157,1.39284,0.06592,
0.04652,1.37396,0.06917,
0.04652,1.37396,0.06917,
0.03157,1.39284,0.06592,
0.03149,1.37552,0.0749,
0.01352,1.37449,0.0805,
0.03149,1.37552,0.0749,
0.01383,1.39434,0.07041,
0.01383,1.39434,0.07041,
0.03149,1.37552,0.0749,
0.03157,1.39284,0.06592,
0.00942,1.27385,0.08785,
0.00896,1.28062,0.0836,
0,1.27583,0.08966,
0,1.27583,0.08966,
0.00896,1.28062,0.0836,
0,1.28235,0.08499,
0.00896,1.28062,0.0836,
0.00942,1.27385,0.08785,
0.02007,1.27859,0.08134,
0.02007,1.27859,0.08134,
0.00942,1.27385,0.08785,
0.01921,1.27166,0.0853,
0.03085,1.27622,0.0786,
0.02007,1.27859,0.08134,
0.03032,1.26928,0.08152,
0.03032,1.26928,0.08152,
0.02007,1.27859,0.08134,
0.01921,1.27166,0.0853,
0.04158,1.27434,0.07525,
0.03085,1.27622,0.0786,
0.04061,1.26738,0.07744,
0.04061,1.26738,0.07744,
0.03085,1.27622,0.0786,
0.03032,1.26928,0.08152,
0.05919,1.26579,0.06679,
0.06294,1.26572,0.06097,
0.06019,1.27317,0.06641,
0.06019,1.27317,0.06641,
0.06294,1.26572,0.06097,
0.06462,1.27315,0.06051,
0.05196,1.27963,0.0702,
0.04206,1.2795,0.07332,
0.05151,1.27351,0.07187,
0.05151,1.27351,0.07187,
0.04206,1.2795,0.07332,
0.04158,1.27434,0.07525,
0.05151,1.27351,0.07187,
0.04158,1.27434,0.07525,
0.05076,1.26639,0.07326,
0.05076,1.26639,0.07326,
0.04158,1.27434,0.07525,
0.04061,1.26738,0.07744,
0.0723,1.28971,0.038,
0.07385,1.29993,0.03892,
0.07096,1.29082,0.0514,
0.07096,1.29082,0.0514,
0.07385,1.29993,0.03892,
0.07238,1.30016,0.05146,
0.0735,1.28922,0.02501,
0.07524,1.29924,0.02406,
0.0723,1.28971,0.038,
0.0723,1.28971,0.038,
0.07524,1.29924,0.02406,
0.07385,1.29993,0.03892,
0.04222,1.28399,0.07212,
0.04321,1.30286,0.07431,
0.03165,1.28557,0.07493,
0.03165,1.28557,0.07493,
0.04321,1.30286,0.07431,
0.03212,1.3029,0.07701,
0.02172,1.30279,0.07802,
0.02113,1.28909,0.07723,
0.03212,1.3029,0.07701,
0.03212,1.3029,0.07701,
0.02113,1.28909,0.07723,
0.03165,1.28557,0.07493,
0.05192,1.28491,0.06885,
0.04222,1.28399,0.07212,
0.05196,1.27963,0.0702,
0.05196,1.27963,0.0702,
0.04222,1.28399,0.07212,
0.04206,1.2795,0.07332,
0.03165,1.28557,0.07493,
0.03143,1.28117,0.07577,
0.04222,1.28399,0.07212,
0.04222,1.28399,0.07212,
0.03143,1.28117,0.07577,
0.04206,1.2795,0.07332,
0.00832,1.29269,0.08068,
0.0086,1.28722,0.08112,
0.02113,1.28909,0.07723,
0.02113,1.28909,0.07723,
0.0086,1.28722,0.08112,
0.02062,1.28425,0.07855,
0.00641,1.21884,0.06702,
0.01268,1.2208,0.0666,
0.00625,1.22817,0.07456,
0.00625,1.22817,0.07456,
0.01268,1.2208,0.0666,
0.0123,1.22821,0.07384,
0.0735,1.28922,0.02501,
0.07406,1.28893,0.01498,
0.07524,1.29924,0.02406,
0.07524,1.29924,0.02406,
0.07406,1.28893,0.01498,
0.07573,1.29881,0.01441,
0.06318,1.25883,0.02643,
0.05957,1.25153,0.02703,
0.0609,1.25604,0.01715,
0.0609,1.25604,0.01715,
0.05957,1.25153,0.02703,
0.05723,1.2522,0.01787,
0.04519,1.2403,0.02048,
0.04799,1.23945,0.02951,
0.03809,1.23448,0.02291,
0.03809,1.23448,0.02291,
0.04799,1.23945,0.02951,
0.04066,1.2338,0.03086,
0.04379,1.24654,0.07316,
0.0357,1.24758,0.07746,
0.04016,1.2401,0.07158,
0.04016,1.2401,0.07158,
0.0357,1.24758,0.07746,
0.0329,1.24107,0.07576,
0.04685,1.25365,0.07409,
0.03817,1.25507,0.07805,
0.04379,1.24654,0.07316,
0.04379,1.24654,0.07316,
0.03817,1.25507,0.07805,
0.0357,1.24758,0.07746,
0.03952,1.26106,0.0782,
0.04882,1.25987,0.07409,
0.04061,1.26738,0.07744,
0.04061,1.26738,0.07744,
0.04882,1.25987,0.07409,
0.05076,1.26639,0.07326,
0.0362,1.23491,0.06981,
0.02972,1.23563,0.07368,
0.02894,1.22811,0.0672,
0.02894,1.22811,0.0672,
0.02972,1.23563,0.07368,
0.02368,1.22846,0.07047,
0.06646,1.26648,0.02626,
0.06571,1.26606,0.03842,
0.06318,1.25883,0.02643,
0.06318,1.25883,0.02643,
0.06571,1.26606,0.03842,
0.06331,1.25887,0.03872,
0.05919,1.26579,0.06679,
0.05076,1.26639,0.07326,
0.05705,1.25905,0.06705,
0.05705,1.25905,0.06705,
0.05076,1.26639,0.07326,
0.04882,1.25987,0.07409,
0.02943,1.26329,0.08302,
0.02834,1.25692,0.08309,
0.03952,1.26106,0.0782,
0.03952,1.26106,0.0782,
0.02834,1.25692,0.08309,
0.03817,1.25507,0.07805,
0.03032,1.26928,0.08152,
0.01921,1.27166,0.0853,
0.02943,1.26329,0.08302,
0.02943,1.26329,0.08302,
0.01921,1.27166,0.0853,
0.0179,1.26613,0.08729,
0.03817,1.25507,0.07805,
0.04685,1.25365,0.07409,
0.03952,1.26106,0.0782,
0.03952,1.26106,0.0782,
0.04685,1.25365,0.07409,
0.04882,1.25987,0.07409,
0.0536,1.24468,0.02831,
0.05402,1.24445,0.04027,
0.04799,1.23945,0.02951,
0.04799,1.23945,0.02951,
0.05402,1.24445,0.04027,
0.04878,1.23893,0.04124,
0.05363,1.24466,0.05153,
0.05295,1.24491,0.06126,
0.04843,1.23908,0.05175,
0.04843,1.23908,0.05175,
0.05295,1.24491,0.06126,
0.04773,1.23921,0.06125,
0.05024,1.24555,0.06635,
0.04379,1.24654,0.07316,
0.04519,1.23945,0.06554,
0.04519,1.23945,0.06554,
0.04379,1.24654,0.07316,
0.04016,1.2401,0.07158,
0.01315,1.23656,0.08006,
0.02085,1.23626,0.07781,
0.01386,1.2431,0.08354,
0.01386,1.2431,0.08354,
0.02085,1.23626,0.07781,
0.02432,1.24212,0.08008,
0.02085,1.23626,0.07781,
0.02972,1.23563,0.07368,
0.02432,1.24212,0.08008,
0.02432,1.24212,0.08008,
0.02972,1.23563,0.07368,
0.0329,1.24107,0.07576,
0.02972,1.23563,0.07368,
0.0362,1.23491,0.06981,
0.0329,1.24107,0.07576,
0.0329,1.24107,0.07576,
0.0362,1.23491,0.06981,
0.04016,1.2401,0.07158,
0.05705,1.25905,0.06705,
0.06076,1.25889,0.06133,
0.05919,1.26579,0.06679,
0.05919,1.26579,0.06679,
0.06076,1.25889,0.06133,
0.06294,1.26572,0.06097,
0.05434,1.25252,0.06685,
0.05795,1.25184,0.06138,
0.05705,1.25905,0.06705,
0.05705,1.25905,0.06705,
0.05795,1.25184,0.06138,
0.06076,1.25889,0.06133,
0.01459,1.25007,0.08622,
0.01638,1.25964,0.08754,
0.0076,1.25064,0.088,
0.0076,1.25064,0.088,
0.01638,1.25964,0.08754,
0.00879,1.26171,0.09069,
0,1.21802,0.06771,
0,1.21473,0.06443,
0.00641,1.21884,0.06702,
0.00641,1.21884,0.06702,
0,1.21473,0.06443,
0.0061,1.21616,0.06352,
0,1.21473,0.06443,
0,1.21446,0.05669,
0.0061,1.21616,0.06352,
0.0061,1.21616,0.06352,
0,1.21446,0.05669,
0.00648,1.21591,0.05586,
0.00648,1.21591,0.05586,
0.00723,1.21695,0.04555,
0.01261,1.21771,0.05502,
0.01261,1.21771,0.05502,
0.00723,1.21695,0.04555,
0.01466,1.21907,0.0448,
0.0123,1.22821,0.07384,
0.01315,1.23656,0.08006,
0.00625,1.22817,0.07456,
0.00625,1.22817,0.07456,
0.01315,1.23656,0.08006,
0.00675,1.23669,0.08119,
0.01315,1.23656,0.08006,
0.01386,1.2431,0.08354,
0.00675,1.23669,0.08119,
0.00675,1.23669,0.08119,
0.01386,1.2431,0.08354,
0.00689,1.24354,0.08499,
0,1.22807,0.07536,
0,1.21802,0.06771,
0.00625,1.22817,0.07456,
0.00625,1.22817,0.07456,
0,1.21802,0.06771,
0.00641,1.21884,0.06702,
0.01478,1.22193,0.02696,
0.01477,1.22046,0.03429,
-9.54E-09,1.21815,0.02846,
-9.54E-09,1.21815,0.02846,
0.01477,1.22046,0.03429,
0,1.21761,0.03547,
0.01386,1.2431,0.08354,
0.01459,1.25007,0.08622,
0.00689,1.24354,0.08499,
0.00689,1.24354,0.08499,
0.01459,1.25007,0.08622,
0.0076,1.25064,0.088,
0.00832,1.29269,0.08068,
0.00805,1.30202,0.08029,
0,1.29425,0.08259,
0,1.29425,0.08259,
0.00805,1.30202,0.08029,
0,1.30203,0.08279,
0.00879,1.26171,0.09069,
0.00949,1.26841,0.09069,
0,1.26362,0.09341,
0,1.26362,0.09341,
0.00949,1.26841,0.09069,
0,1.27174,0.09502,
0.00625,1.22817,0.07456,
0.00675,1.23669,0.08119,
0,1.22807,0.07536,
0,1.22807,0.07536,
0.00675,1.23669,0.08119,
0,1.23682,0.08214,
0.00675,1.23669,0.08119,
0.00689,1.24354,0.08499,
0,1.23682,0.08214,
0,1.23682,0.08214,
0.00689,1.24354,0.08499,
0,1.24385,0.08592,
0.00949,1.26841,0.09069,
0.00942,1.27385,0.08785,
0,1.27174,0.09502,
0,1.27174,0.09502,
0.00942,1.27385,0.08785,
0,1.27583,0.08966,
0,1.32051,0.08428,
0.00931,1.32109,0.08153,
0,1.33386,0.08582,
0,1.33386,0.08582,
0.00931,1.32109,0.08153,
0.0113,1.33435,0.08343,
0.01383,1.39434,0.07041,
0,1.39425,0.07381,
0.01352,1.37449,0.0805,
0.01352,1.37449,0.0805,
0,1.39425,0.07381,
0,1.37428,0.08327,
0.01352,1.37449,0.0805,
0,1.37428,0.08327,
0.01254,1.35949,0.0839,
0.01254,1.35949,0.0839,
0,1.37428,0.08327,
0,1.3588,0.08654,
0.0119,1.34577,0.08461,
0,1.34487,0.08687,
0.0113,1.33435,0.08343,
0.0113,1.33435,0.08343,
0,1.34487,0.08687,
0,1.33386,0.08582,
0.0086,1.28722,0.08112,
0.00832,1.29269,0.08068,
0,1.2891,0.08302,
0,1.2891,0.08302,
0.00832,1.29269,0.08068,
0,1.29425,0.08259,
0.01254,1.35949,0.0839,
0,1.3588,0.08654,
0.0119,1.34577,0.08461,
0.0119,1.34577,0.08461,
0,1.3588,0.08654,
0,1.34487,0.08687,
0.00689,1.24354,0.08499,
0.0076,1.25064,0.088,
0,1.24385,0.08592,
0,1.24385,0.08592,
0.0076,1.25064,0.088,
0,1.25112,0.08923,
0,1.30796,0.08302,
0.00789,1.31027,0.08043,
0,1.32051,0.08428,
0,1.32051,0.08428,
0.00789,1.31027,0.08043,
0.00931,1.32109,0.08153,
0.0119,1.34577,0.08461,
0.01861,1.34091,0.08232,
0.03188,1.35226,0.07853,
0.03188,1.35226,0.07853,
0.01861,1.34091,0.08232,
0.03228,1.34693,0.07827,
0.01638,1.25964,0.08754,
0.0179,1.26613,0.08729,
0.00879,1.26171,0.09069,
0.00879,1.26171,0.09069,
0.0179,1.26613,0.08729,
0.00949,1.26841,0.09069,
0.0553,1.32054,0.06849,
0.05697,1.33119,0.0684,
0.04407,1.32169,0.07304,
0.04407,1.32169,0.07304,
0.05697,1.33119,0.0684,
0.04535,1.3333,0.0733,
0.03143,1.28117,0.07577,
0.03165,1.28557,0.07493,
0.02062,1.28425,0.07855,
0.02062,1.28425,0.07855,
0.03165,1.28557,0.07493,
0.02113,1.28909,0.07723,
0.07847,1.32401,0.02407,
0.07697,1.31061,0.02391,
0.07878,1.32355,0.01812,
0.07878,1.32355,0.01812,
0.07697,1.31061,0.02391,
0.0777,1.31122,0.01351,
0.05876,1.35078,0.06779,
0.05924,1.35889,0.06756,
0.04625,1.35276,0.07318,
0.04625,1.35276,0.07318,
0.05924,1.35889,0.06756,
0.04587,1.36008,0.07223,
0.03139,1.3603,0.07762,
0.03188,1.35226,0.07853,
0.04587,1.36008,0.07223,
0.04587,1.36008,0.07223,
0.03188,1.35226,0.07853,
0.04625,1.35276,0.07318,
0.0119,1.34577,0.08461,
0.03188,1.35226,0.07853,
0.01254,1.35949,0.0839,
0.01254,1.35949,0.0839,
0.03188,1.35226,0.07853,
0.03139,1.3603,0.07762,
0.06822,1.34604,0.06187,
0.06715,1.34033,0.06291,
0.07509,1.33874,0.05462,
0.07509,1.33874,0.05462,
0.06715,1.34033,0.06291,
0.07308,1.334,0.0574,
0.05849,1.34513,0.06759,
0.05876,1.35078,0.06779,
0.04611,1.34741,0.07345,
0.04611,1.34741,0.07345,
0.05876,1.35078,0.06779,
0.04625,1.35276,0.07318,
0.03228,1.34693,0.07827,
0.04611,1.34741,0.07345,
0.03188,1.35226,0.07853,
0.03188,1.35226,0.07853,
0.04611,1.34741,0.07345,
0.04625,1.35276,0.07318,
0.03247,1.32076,0.0765,
0.04407,1.32169,0.07304,
0.0331,1.33154,0.07653,
0.0331,1.33154,0.07653,
0.04407,1.32169,0.07304,
0.04535,1.3333,0.0733,
0.06715,1.34033,0.06291,
0.06566,1.33482,0.06396,
0.07308,1.334,0.0574,
0.07308,1.334,0.0574,
0.06566,1.33482,0.06396,
0.07028,1.32903,0.06039,
0.06566,1.33482,0.06396,
0.06281,1.32678,0.0652,
0.07028,1.32903,0.06039,
0.07028,1.32903,0.06039,
0.06281,1.32678,0.0652,
0.06632,1.32308,0.0629,
0.05697,1.33119,0.0684,
0.05815,1.33938,0.06792,
0.04535,1.3333,0.0733,
0.04535,1.3333,0.0733,
0.05815,1.33938,0.06792,
0.04568,1.34193,0.07366,
0.06572,1.2798,0.06003,
0.06858,1.27965,0.0514,
0.06782,1.29159,0.05893,
0.06782,1.29159,0.05893,
0.06858,1.27965,0.0514,
0.07096,1.29082,0.0514,
0.06294,1.26572,0.06097,
0.06464,1.26573,0.05144,
0.06462,1.27315,0.06051,
0.06462,1.27315,0.06051,
0.06464,1.26573,0.05144,
0.0667,1.27327,0.05123,
0.06076,1.25889,0.06133,
0.06246,1.25878,0.0519,
0.06294,1.26572,0.06097,
0.06294,1.26572,0.06097,
0.06246,1.25878,0.0519,
0.06464,1.26573,0.05144,
0.05795,1.25184,0.06138,
0.0591,1.25166,0.05157,
0.06076,1.25889,0.06133,
0.06076,1.25889,0.06133,
0.0591,1.25166,0.05157,
0.06246,1.25878,0.0519,
0.04843,1.23908,0.05175,
0.04773,1.23921,0.06125,
0.04179,1.234,0.05217,
0.04179,1.234,0.05217,
0.04773,1.23921,0.06125,
0.0414,1.23409,0.06121,
0.03282,1.22791,0.0613,
0.0333,1.22787,0.05316,
0.0414,1.23409,0.06121,
0.0414,1.23409,0.06121,
0.0333,1.22787,0.05316,
0.04179,1.234,0.05217,
0.05795,1.25184,0.06138,
0.05434,1.25252,0.06685,
0.05295,1.24491,0.06126,
0.05295,1.24491,0.06126,
0.05434,1.25252,0.06685,
0.05024,1.24555,0.06635,
0.04519,1.23945,0.06554,
0.04773,1.23921,0.06125,
0.05024,1.24555,0.06635,
0.05024,1.24555,0.06635,
0.04773,1.23921,0.06125,
0.05295,1.24491,0.06126,
0.03093,1.22799,0.06463,
0.03282,1.22791,0.0613,
0.03978,1.23434,0.06523,
0.03978,1.23434,0.06523,
0.03282,1.22791,0.0613,
0.0414,1.23409,0.06121,
0.03978,1.23434,0.06523,
0.0414,1.23409,0.06121,
0.04519,1.23945,0.06554,
0.04519,1.23945,0.06554,
0.0414,1.23409,0.06121,
0.04773,1.23921,0.06125,
0.05196,1.27963,0.0702,
0.05984,1.28019,0.06589,
0.05192,1.28491,0.06885,
0.05192,1.28491,0.06885,
0.05984,1.28019,0.06589,
0.0579,1.28663,0.06556,
0.05151,1.27351,0.07187,
0.06019,1.27317,0.06641,
0.05196,1.27963,0.0702,
0.05196,1.27963,0.0702,
0.06019,1.27317,0.06641,
0.05984,1.28019,0.06589,
0.05076,1.26639,0.07326,
0.05919,1.26579,0.06679,
0.05151,1.27351,0.07187,
0.05151,1.27351,0.07187,
0.05919,1.26579,0.06679,
0.06019,1.27317,0.06641,
0.06464,1.26573,0.05144,
0.06571,1.26606,0.03842,
0.0667,1.27327,0.05123,
0.0667,1.27327,0.05123,
0.06571,1.26606,0.03842,
0.07023,1.27958,0.03767,
0.06858,1.27965,0.0514,
0.06572,1.2798,0.06003,
0.0667,1.27327,0.05123,
0.0667,1.27327,0.05123,
0.06572,1.2798,0.06003,
0.06462,1.27315,0.06051,
0.06242,1.29301,0.06351,
0.06782,1.29159,0.05893,
0.06398,1.3,0.06301,
0.06398,1.3,0.06301,
0.06782,1.29159,0.05893,
0.06962,1.3,0.05828,
0.06398,1.3,0.06301,
0.06962,1.3,0.05828,
0.06391,1.30679,0.06338,
0.06391,1.30679,0.06338,
0.06962,1.3,0.05828,
0.06977,1.30839,0.05889,
0.06391,1.30679,0.06338,
0.06977,1.30839,0.05889,
0.06239,1.31339,0.06486,
0.06239,1.31339,0.06486,
0.06977,1.30839,0.05889,
0.06885,1.31663,0.06067,
0.05924,1.35889,0.06756,
0.05876,1.35078,0.06779,
0.07054,1.35545,0.05999,
0.07054,1.35545,0.05999,
0.05876,1.35078,0.06779,
0.06822,1.34604,0.06187,
0.06715,1.34033,0.06291,
0.06822,1.34604,0.06187,
0.05849,1.34513,0.06759,
0.05849,1.34513,0.06759,
0.06822,1.34604,0.06187,
0.05876,1.35078,0.06779,
0.06566,1.33482,0.06396,
0.06715,1.34033,0.06291,
0.05815,1.33938,0.06792,
0.05815,1.33938,0.06792,
0.06715,1.34033,0.06291,
0.05849,1.34513,0.06759,
0.06281,1.32678,0.0652,
0.06566,1.33482,0.06396,
0.05697,1.33119,0.0684,
0.05697,1.33119,0.0684,
0.06566,1.33482,0.06396,
0.05815,1.33938,0.06792,
0.03212,1.3029,0.07701,
0.04321,1.30286,0.07431,
0.03247,1.32076,0.0765,
0.03247,1.32076,0.0765,
0.04321,1.30286,0.07431,
0.04407,1.32169,0.07304,
0.02172,1.30279,0.07802,
0.03212,1.3029,0.07701,
0.02173,1.31709,0.07831,
0.02173,1.31709,0.07831,
0.03212,1.3029,0.07701,
0.03247,1.32076,0.0765,
0.01492,1.3025,0.0785,
0.02172,1.30279,0.07802,
0.01438,1.31283,0.07895,
0.01438,1.31283,0.07895,
0.02172,1.30279,0.07802,
0.02173,1.31709,0.07831,
0.00805,1.30202,0.08029,
0.01492,1.3025,0.0785,
0.00789,1.31027,0.08043,
0.00789,1.31027,0.08043,
0.01492,1.3025,0.0785,
0.01438,1.31283,0.07895,
0,1.30203,0.08279,
0.00805,1.30202,0.08029,
0,1.30796,0.08302,
0,1.30796,0.08302,
0.00805,1.30202,0.08029,
0.00789,1.31027,0.08043,
0.05322,1.30294,0.07019,
0.04321,1.30286,0.07431,
0.05192,1.28491,0.06885,
0.05192,1.28491,0.06885,
0.04321,1.30286,0.07431,
0.04222,1.28399,0.07212,
0.01438,1.31283,0.07895,
0.02173,1.31709,0.07831,
0.00931,1.32109,0.08153,
0.00931,1.32109,0.08153,
0.02173,1.31709,0.07831,
0.02158,1.32687,0.07898,
0.0331,1.33154,0.07653,
0.04535,1.3333,0.0733,
0.03255,1.34161,0.07752,
0.03255,1.34161,0.07752,
0.04535,1.3333,0.0733,
0.04568,1.34193,0.07366,
0.0731,1.30975,0.0524,
0.06977,1.30839,0.05889,
0.07238,1.30016,0.05146,
0.07238,1.30016,0.05146,
0.06977,1.30839,0.05889,
0.06962,1.3,0.05828,
0.06782,1.29159,0.05893,
0.07096,1.29082,0.0514,
0.06962,1.3,0.05828,
0.06962,1.3,0.05828,
0.07096,1.29082,0.0514,
0.07238,1.30016,0.05146,
0.04611,1.34741,0.07345,
0.04568,1.34193,0.07366,
0.05849,1.34513,0.06759,
0.05849,1.34513,0.06759,
0.04568,1.34193,0.07366,
0.05815,1.33938,0.06792,
0.04652,1.37396,0.06917,
0.04587,1.36008,0.07223,
0.0591,1.37122,0.06388,
0.0591,1.37122,0.06388,
0.04587,1.36008,0.07223,
0.05924,1.35889,0.06756,
0.04651,1.38985,0.06161,
0.04652,1.37396,0.06917,
0.0588,1.38544,0.05528,
0.0588,1.38544,0.05528,
0.04652,1.37396,0.06917,
0.0591,1.37122,0.06388,
-9.54E-09,1.21815,0.02846,
-4.77E-09,1.21742,0.02325,
0.01478,1.22193,0.02696,
0.01478,1.22193,0.02696,
-4.77E-09,1.21742,0.02325,
0.01468,1.22309,0.01994,
0.02721,1.23366,-0.00837,
0.02003,1.23105,-0.02186,
0.03316,1.23991,-0.01178,
0.03316,1.23991,-0.01178,
0.02003,1.23105,-0.02186,
0.0228,1.23744,-0.02778,
0,1.23344,-0.03937,
0.0228,1.23744,-0.02778,
0,1.22734,-0.03069,
0,1.22734,-0.03069,
0.0228,1.23744,-0.02778,
0.02003,1.23105,-0.02186,
0.07549,1.35591,0.04042,
0.07662,1.34467,0.04784,
0.0794,1.3395,0.02451,
0.0794,1.3395,0.02451,
0.07662,1.34467,0.04784,
0.07767,1.32552,0.03517,
0.06907,1.36504,0.05375,
0.07054,1.35545,0.05999,
0.07549,1.35591,0.04042,
0.07549,1.35591,0.04042,
0.07054,1.35545,0.05999,
0.07662,1.34467,0.04784,
0.07594,1.31054,0.03424,
0.07767,1.32552,0.03517,
0.07447,1.31052,0.04458,
0.07447,1.31052,0.04458,
0.07767,1.32552,0.03517,
0.07591,1.32547,0.04546,
0.07767,1.32552,0.03517,
0.07594,1.31054,0.03424,
0.07847,1.32401,0.02407,
0.07847,1.32401,0.02407,
0.07594,1.31054,0.03424,
0.07697,1.31061,0.02391,
0.07697,1.31061,0.02391,
0.07594,1.31054,0.03424,
0.07524,1.29924,0.02406,
0.07524,1.29924,0.02406,
0.07594,1.31054,0.03424,
0.07385,1.29993,0.03892,
0.06331,1.25887,0.03872,
0.06246,1.25878,0.0519,
0.0595,1.25156,0.0393,
0.0595,1.25156,0.0393,
0.06246,1.25878,0.0519,
0.0591,1.25166,0.05157,
0.0595,1.25156,0.0393,
0.0591,1.25166,0.05157,
0.05402,1.24445,0.04027,
0.05402,1.24445,0.04027,
0.0591,1.25166,0.05157,
0.05363,1.24466,0.05153,
0.04843,1.23908,0.05175,
0.04878,1.23893,0.04124,
0.05363,1.24466,0.05153,
0.05363,1.24466,0.05153,
0.04878,1.23893,0.04124,
0.05402,1.24445,0.04027,
0.04878,1.23893,0.04124,
0.04843,1.23908,0.05175,
0.04194,1.23381,0.04234,
0.04194,1.23381,0.04234,
0.04843,1.23908,0.05175,
0.04179,1.234,0.05217,
0.0333,1.22787,0.05316,
0.03257,1.22766,0.04363,
0.04179,1.234,0.05217,
0.04179,1.234,0.05217,
0.03257,1.22766,0.04363,
0.04194,1.23381,0.04234,
0.03257,1.22766,0.04363,
0.0333,1.22787,0.05316,
0.02345,1.22239,0.04425,
0.02345,1.22239,0.04425,
0.0333,1.22787,0.05316,
0.02252,1.2218,0.05414,
0,1.21446,0.05669,
0,1.21552,0.04637,
0.00648,1.21591,0.05586,
0.00648,1.21591,0.05586,
0,1.21552,0.04637,
0.00723,1.21695,0.04555,
0.0536,1.24468,0.02831,
0.05134,1.24564,0.01876,
0.05957,1.25153,0.02703,
0.05957,1.25153,0.02703,
0.05134,1.24564,0.01876,
0.05723,1.2522,0.01787,
0.02885,1.22882,0.02475,
0.03154,1.22797,0.03229,
0.01478,1.22193,0.02696,
0.01478,1.22193,0.02696,
0.03154,1.22797,0.03229,
0.01477,1.22046,0.03429,
0.03809,1.23448,0.02291,
0.02885,1.22882,0.02475,
0.03478,1.23507,0.01392,
0.03478,1.23507,0.01392,
0.02885,1.22882,0.02475,
0.02579,1.22936,0.01621,
0.03809,1.23448,0.02291,
0.03478,1.23507,0.01392,
0.04519,1.2403,0.02048,
0.04519,1.2403,0.02048,
0.03478,1.23507,0.01392,
0.04196,1.24137,0.01174,
0.05134,1.24564,0.01876,
0.04519,1.2403,0.02048,
0.04692,1.24657,0.00995,
0.04692,1.24657,0.00995,
0.04519,1.2403,0.02048,
0.04196,1.24137,0.01174,
0.03862,1.24182,-0.00035,
0.03204,1.23539,0.0016,
0.03316,1.23991,-0.01178,
0.03316,1.23991,-0.01178,
0.03204,1.23539,0.0016,
0.02721,1.23366,-0.00837,
0.02579,1.22936,0.01621,
0.02885,1.22882,0.02475,
0.01468,1.22309,0.01994,
0.01468,1.22309,0.01994,
0.02885,1.22882,0.02475,
0.01478,1.22193,0.02696,
0.06724,1.26686,0.01577,
0.06646,1.26648,0.02626,
0.06332,1.25867,0.01678,
0.06332,1.25867,0.01678,
0.06646,1.26648,0.02626,
0.06318,1.25883,0.02643,
0.07177,1.28009,0.01507,
0.07095,1.27964,0.02588,
0.06724,1.26686,0.01577,
0.06724,1.26686,0.01577,
0.07095,1.27964,0.02588,
0.06646,1.26648,0.02626,
0.07406,1.28893,0.01498,
0.0735,1.28922,0.02501,
0.07177,1.28009,0.01507,
0.07177,1.28009,0.01507,
0.0735,1.28922,0.02501,
0.07095,1.27964,0.02588,
0.0777,1.31122,0.01351,
0.07697,1.31061,0.02391,
0.07573,1.29881,0.01441,
0.07573,1.29881,0.01441,
0.07697,1.31061,0.02391,
0.07524,1.29924,0.02406,
-0.04652,1.37396,0.06917,
-0.04587,1.36008,0.07223,
-0.03149,1.37552,0.0749,
-0.03149,1.37552,0.0749,
-0.04587,1.36008,0.07223,
-0.03139,1.3603,0.07762,
-0.07023,1.27958,0.03767,
-0.06858,1.27965,0.0514,
-0.0723,1.28971,0.038,
-0.0723,1.28971,0.038,
-0.06858,1.27965,0.0514,
-0.07096,1.29082,0.0514,
-0.01638,1.25964,0.08754,
-0.02834,1.25692,0.08309,
-0.01459,1.25007,0.08622,
-0.01459,1.25007,0.08622,
-0.02834,1.25692,0.08309,
-0.02647,1.2487,0.08191,
-0.0061,1.21616,0.06352,
-0.00641,1.21884,0.06702,
-0.01248,1.21797,0.06233,
-0.01248,1.21797,0.06233,
-0.00641,1.21884,0.06702,
-0.01271,1.22073,0.0666,
-0.0061,1.21616,0.06352,
-0.01248,1.21797,0.06233,
-0.00648,1.21591,0.05586,
-0.00648,1.21591,0.05586,
-0.01248,1.21797,0.06233,
-0.01261,1.21771,0.05502,
-0.0591,1.25166,0.05157,
-0.05363,1.24466,0.05153,
-0.05795,1.25184,0.06138,
-0.05795,1.25184,0.06138,
-0.05363,1.24466,0.05153,
-0.05295,1.24491,0.06126,
-0.0333,1.22787,0.05316,
-0.02252,1.2218,0.05414,
-0.03282,1.22791,0.0613,
-0.03282,1.22791,0.0613,
-0.02252,1.2218,0.05414,
-0.02235,1.22209,0.06108,
-0.01248,1.21797,0.06233,
-0.02235,1.22209,0.06108,
-0.01261,1.21771,0.05502,
-0.01261,1.21771,0.05502,
-0.02235,1.22209,0.06108,
-0.02252,1.2218,0.05414,
-0.03978,1.23434,0.06523,
-0.03093,1.22799,0.06463,
-0.0362,1.23491,0.06981,
-0.0362,1.23491,0.06981,
-0.03093,1.22799,0.06463,
-0.02894,1.22811,0.0672,
-0.04379,1.24654,0.07316,
-0.04685,1.25365,0.07409,
-0.05024,1.24555,0.06635,
-0.05024,1.24555,0.06635,
-0.04685,1.25365,0.07409,
-0.05434,1.25252,0.06685,
-0.07023,1.27958,0.03767,
-0.0723,1.28971,0.038,
-0.07095,1.27964,0.02588,
-0.07095,1.27964,0.02588,
-0.0723,1.28971,0.038,
-0.0735,1.28922,0.02501,
-0.04799,1.23945,0.02951,
-0.0536,1.24468,0.02831,
-0.04519,1.2403,0.02048,
-0.04519,1.2403,0.02048,
-0.0536,1.24468,0.02831,
-0.05134,1.24564,0.01876,
-0.06318,1.25883,0.02643,
-0.05957,1.25153,0.02703,
-0.06331,1.25887,0.03872,
-0.06331,1.25887,0.03872,
-0.05957,1.25153,0.02703,
-0.0595,1.25156,0.0393,
-0.05957,1.25153,0.02703,
-0.0536,1.24468,0.02831,
-0.0595,1.25156,0.0393,
-0.0595,1.25156,0.0393,
-0.0536,1.24468,0.02831,
-0.05402,1.24445,0.04027,
-0.04692,1.24657,0.00995,
-0.05134,1.24564,0.01876,
-0.05342,1.25298,0.00928,
-0.05342,1.25298,0.00928,
-0.05134,1.24564,0.01876,
-0.05723,1.2522,0.01787,
-0.04799,1.23945,0.02951,
-0.04066,1.2338,0.03086,
-0.04878,1.23893,0.04124,
-0.04878,1.23893,0.04124,
-0.04066,1.2338,0.03086,
-0.04194,1.23381,0.04234,
-0.06571,1.26606,0.03842,
-0.07023,1.27958,0.03767,
-0.06646,1.26648,0.02626,
-0.06646,1.26648,0.02626,
-0.07023,1.27958,0.03767,
-0.07095,1.27964,0.02588,
-0.02345,1.22239,0.04425,
-0.01466,1.21907,0.0448,
-0.02252,1.2218,0.05414,
-0.02252,1.2218,0.05414,
-0.01466,1.21907,0.0448,
-0.01261,1.21771,0.05502,
-0.03085,1.27622,0.0786,
-0.03143,1.28117,0.07577,
-0.04158,1.27434,0.07525,
-0.04158,1.27434,0.07525,
-0.03143,1.28117,0.07577,
-0.04206,1.2795,0.07332,
-0.06571,1.26606,0.03842,
-0.06331,1.25887,0.03872,
-0.06464,1.26573,0.05144,
-0.06464,1.26573,0.05144,
-0.06331,1.25887,0.03872,
-0.06246,1.25878,0.0519,
-0.04685,1.25365,0.07409,
-0.04882,1.25987,0.07409,
-0.05434,1.25252,0.06685,
-0.05434,1.25252,0.06685,
-0.04882,1.25987,0.07409,
-0.05705,1.25905,0.06705,
0,1.2891,0.08302,
-0.0086,1.28722,0.08112,
0,1.28235,0.08499,
0,1.28235,0.08499,
-0.0086,1.28722,0.08112,
-0.00896,1.28062,0.0836,
-0.02943,1.26329,0.08302,
-0.03032,1.26928,0.08152,
-0.03952,1.26106,0.0782,
-0.03952,1.26106,0.0782,
-0.03032,1.26928,0.08152,
-0.04061,1.26738,0.07744,
-0.00949,1.26841,0.09069,
-0.00942,1.27385,0.08785,
-0.0179,1.26613,0.08729,
-0.0179,1.26613,0.08729,
-0.00942,1.27385,0.08785,
-0.01921,1.27166,0.0853,
-0.03149,1.37552,0.0749,
-0.03139,1.3603,0.07762,
-0.01352,1.37449,0.0805,
-0.01352,1.37449,0.0805,
-0.03139,1.3603,0.07762,
-0.01254,1.35949,0.0839,
-0.02173,1.31709,0.07831,
-0.02158,1.32687,0.07898,
-0.03247,1.32076,0.0765,
-0.03247,1.32076,0.0765,
-0.02158,1.32687,0.07898,
-0.0331,1.33154,0.07653,
-0.06019,1.27317,0.06641,
-0.05984,1.28019,0.06589,
-0.06462,1.27315,0.06051,
-0.06462,1.27315,0.06051,
-0.05984,1.28019,0.06589,
-0.06572,1.2798,0.06003,
-0.0362,1.23491,0.06981,
-0.04016,1.2401,0.07158,
-0.03978,1.23434,0.06523,
-0.03978,1.23434,0.06523,
-0.04016,1.2401,0.07158,
-0.04519,1.23945,0.06554,
-0.02432,1.24212,0.08008,
-0.01386,1.2431,0.08354,
-0.02647,1.2487,0.08191,
-0.02647,1.2487,0.08191,
-0.01386,1.2431,0.08354,
-0.01459,1.25007,0.08622,
-0.04066,1.2338,0.03086,
-0.03154,1.22797,0.03229,
-0.04194,1.23381,0.04234,
-0.04194,1.23381,0.04234,
-0.03154,1.22797,0.03229,
-0.03257,1.22766,0.04363,
-0.02885,1.22882,0.02475,
-0.03154,1.22797,0.03229,
-0.03809,1.23448,0.02291,
-0.03809,1.23448,0.02291,
-0.03154,1.22797,0.03229,
-0.04066,1.2338,0.03086,
-0.02432,1.24212,0.08008,
-0.02647,1.2487,0.08191,
-0.0329,1.24107,0.07576,
-0.0329,1.24107,0.07576,
-0.02647,1.2487,0.08191,
-0.0357,1.24758,0.07746,
-0.02647,1.2487,0.08191,
-0.02834,1.25692,0.08309,
-0.0357,1.24758,0.07746,
-0.0357,1.24758,0.07746,
-0.02834,1.25692,0.08309,
-0.03817,1.25507,0.07805,
-0.03085,1.27622,0.0786,
-0.02007,1.27859,0.08134,
-0.03143,1.28117,0.07577,
-0.03143,1.28117,0.07577,
-0.02007,1.27859,0.08134,
-0.02062,1.28425,0.07855,
-0.0179,1.26613,0.08729,
-0.02943,1.26329,0.08302,
-0.01638,1.25964,0.08754,
-0.01638,1.25964,0.08754,
-0.02943,1.26329,0.08302,
-0.02834,1.25692,0.08309,
-0.04611,1.34741,0.07345,
-0.04568,1.34193,0.07366,
-0.03228,1.34693,0.07827,
-0.03228,1.34693,0.07827,
-0.04568,1.34193,0.07366,
-0.03255,1.34161,0.07752,
-0.02007,1.27859,0.08134,
-0.00896,1.28062,0.0836,
-0.02062,1.28425,0.07855,
-0.02062,1.28425,0.07855,
-0.00896,1.28062,0.0836,
-0.0086,1.28722,0.08112,
-0.06877,1.37605,0.04344,
-0.06907,1.36504,0.05375,
-0.0588,1.38544,0.05528,
-0.0588,1.38544,0.05528,
-0.06907,1.36504,0.05375,
-0.0591,1.37122,0.06388,
-0.04606,1.38991,0.06157,
-0.04652,1.37396,0.06917,
-0.03157,1.39284,0.06592,
-0.03157,1.39284,0.06592,
-0.04652,1.37396,0.06917,
-0.03149,1.37552,0.0749,
-0.03157,1.39284,0.06592,
-0.03149,1.37552,0.0749,
-0.01383,1.39434,0.07041,
-0.01383,1.39434,0.07041,
-0.03149,1.37552,0.0749,
-0.01352,1.37449,0.0805,
0,1.28235,0.08499,
-0.00896,1.28062,0.0836,
0,1.27583,0.08966,
0,1.27583,0.08966,
-0.00896,1.28062,0.0836,
-0.00942,1.27385,0.08785,
-0.01921,1.27166,0.0853,
-0.00942,1.27385,0.08785,
-0.02007,1.27859,0.08134,
-0.02007,1.27859,0.08134,
-0.00942,1.27385,0.08785,
-0.00896,1.28062,0.0836,
-0.01921,1.27166,0.0853,
-0.02007,1.27859,0.08134,
-0.03032,1.26928,0.08152,
-0.03032,1.26928,0.08152,
-0.02007,1.27859,0.08134,
-0.03085,1.27622,0.0786,
-0.03032,1.26928,0.08152,
-0.03085,1.27622,0.0786,
-0.04061,1.26738,0.07744,
-0.04061,1.26738,0.07744,
-0.03085,1.27622,0.0786,
-0.04158,1.27434,0.07525,
-0.05919,1.26579,0.06679,
-0.06019,1.27317,0.06641,
-0.06294,1.26572,0.06097,
-0.06294,1.26572,0.06097,
-0.06019,1.27317,0.06641,
-0.06462,1.27315,0.06051,
-0.04158,1.27434,0.07525,
-0.04206,1.2795,0.07332,
-0.05151,1.27351,0.07187,
-0.05151,1.27351,0.07187,
-0.04206,1.2795,0.07332,
-0.05196,1.27963,0.0702,
-0.04061,1.26738,0.07744,
-0.04158,1.27434,0.07525,
-0.05076,1.26639,0.07326,
-0.05076,1.26639,0.07326,
-0.04158,1.27434,0.07525,
-0.05151,1.27351,0.07187,
-0.07238,1.30016,0.05146,
-0.07385,1.29993,0.03892,
-0.07096,1.29082,0.0514,
-0.07096,1.29082,0.0514,
-0.07385,1.29993,0.03892,
-0.0723,1.28971,0.038,
-0.0735,1.28922,0.02501,
-0.0723,1.28971,0.038,
-0.07524,1.29924,0.02406,
-0.07524,1.29924,0.02406,
-0.0723,1.28971,0.038,
-0.07385,1.29993,0.03892,
-0.03212,1.3029,0.07701,
-0.04321,1.30286,0.07431,
-0.03165,1.28557,0.07493,
-0.03165,1.28557,0.07493,
-0.04321,1.30286,0.07431,
-0.04222,1.28399,0.07212,
-0.03165,1.28557,0.07493,
-0.02113,1.28909,0.07723,
-0.03212,1.3029,0.07701,
-0.03212,1.3029,0.07701,
-0.02113,1.28909,0.07723,
-0.02172,1.30279,0.07802,
-0.04206,1.2795,0.07332,
-0.04222,1.28399,0.07212,
-0.05196,1.27963,0.0702,
-0.05196,1.27963,0.0702,
-0.04222,1.28399,0.07212,
-0.05192,1.28491,0.06885,
-0.04206,1.2795,0.07332,
-0.03143,1.28117,0.07577,
-0.04222,1.28399,0.07212,
-0.04222,1.28399,0.07212,
-0.03143,1.28117,0.07577,
-0.03165,1.28557,0.07493,
-0.02062,1.28425,0.07855,
-0.0086,1.28722,0.08112,
-0.02113,1.28909,0.07723,
-0.02113,1.28909,0.07723,
-0.0086,1.28722,0.08112,
-0.00832,1.29269,0.08068,
-0.0123,1.22821,0.07384,
-0.01271,1.22073,0.0666,
-0.00625,1.22817,0.07456,
-0.00625,1.22817,0.07456,
-0.01271,1.22073,0.0666,
-0.00641,1.21884,0.06702,
-0.07573,1.29881,0.01441,
-0.07406,1.28893,0.01498,
-0.07524,1.29924,0.02406,
-0.07524,1.29924,0.02406,
-0.07406,1.28893,0.01498,
-0.0735,1.28922,0.02501,
-0.05723,1.2522,0.01787,
-0.05957,1.25153,0.02703,
-0.0609,1.25604,0.01715,
-0.0609,1.25604,0.01715,
-0.05957,1.25153,0.02703,
-0.06318,1.25883,0.02643,
-0.04519,1.2403,0.02048,
-0.03809,1.23448,0.02291,
-0.04799,1.23945,0.02951,
-0.04799,1.23945,0.02951,
-0.03809,1.23448,0.02291,
-0.04066,1.2338,0.03086,
-0.0329,1.24107,0.07576,
-0.0357,1.24758,0.07746,
-0.04016,1.2401,0.07158,
-0.04016,1.2401,0.07158,
-0.0357,1.24758,0.07746,
-0.04379,1.24654,0.07316,
-0.0357,1.24758,0.07746,
-0.03817,1.25507,0.07805,
-0.04379,1.24654,0.07316,
-0.04379,1.24654,0.07316,
-0.03817,1.25507,0.07805,
-0.04685,1.25365,0.07409,
-0.03952,1.26106,0.0782,
-0.04061,1.26738,0.07744,
-0.04882,1.25987,0.07409,
-0.04882,1.25987,0.07409,
-0.04061,1.26738,0.07744,
-0.05076,1.26639,0.07326,
-0.02367,1.22848,0.07047,
-0.02972,1.23563,0.07368,
-0.02894,1.22811,0.0672,
-0.02894,1.22811,0.0672,
-0.02972,1.23563,0.07368,
-0.0362,1.23491,0.06981,
-0.06646,1.26648,0.02626,
-0.06318,1.25883,0.02643,
-0.06571,1.26606,0.03842,
-0.06571,1.26606,0.03842,
-0.06318,1.25883,0.02643,
-0.06331,1.25887,0.03872,
-0.05919,1.26579,0.06679,
-0.05705,1.25905,0.06705,
-0.05076,1.26639,0.07326,
-0.05076,1.26639,0.07326,
-0.05705,1.25905,0.06705,
-0.04882,1.25987,0.07409,
-0.03817,1.25507,0.07805,
-0.02834,1.25692,0.08309,
-0.03952,1.26106,0.0782,
-0.03952,1.26106,0.0782,
-0.02834,1.25692,0.08309,
-0.02943,1.26329,0.08302,
-0.0179,1.26613,0.08729,
-0.01921,1.27166,0.0853,
-0.02943,1.26329,0.08302,
-0.02943,1.26329,0.08302,
-0.01921,1.27166,0.0853,
-0.03032,1.26928,0.08152,
-0.04882,1.25987,0.07409,
-0.04685,1.25365,0.07409,
-0.03952,1.26106,0.0782,
-0.03952,1.26106,0.0782,
-0.04685,1.25365,0.07409,
-0.03817,1.25507,0.07805,
-0.0536,1.24468,0.02831,
-0.04799,1.23945,0.02951,
-0.05402,1.24445,0.04027,
-0.05402,1.24445,0.04027,
-0.04799,1.23945,0.02951,
-0.04878,1.23893,0.04124,
-0.04773,1.23921,0.06125,
-0.05295,1.24491,0.06126,
-0.04843,1.23908,0.05175,
-0.04843,1.23908,0.05175,
-0.05295,1.24491,0.06126,
-0.05363,1.24466,0.05153,
-0.04016,1.2401,0.07158,
-0.04379,1.24654,0.07316,
-0.04519,1.23945,0.06554,
-0.04519,1.23945,0.06554,
-0.04379,1.24654,0.07316,
-0.05024,1.24555,0.06635,
-0.02432,1.24212,0.08008,
-0.02085,1.23626,0.07781,
-0.01386,1.2431,0.08354,
-0.01386,1.2431,0.08354,
-0.02085,1.23626,0.07781,
-0.01315,1.23656,0.08006,
-0.0329,1.24107,0.07576,
-0.02972,1.23563,0.07368,
-0.02432,1.24212,0.08008,
-0.02432,1.24212,0.08008,
-0.02972,1.23563,0.07368,
-0.02085,1.23626,0.07781,
-0.04016,1.2401,0.07158,
-0.0362,1.23491,0.06981,
-0.0329,1.24107,0.07576,
-0.0329,1.24107,0.07576,
-0.0362,1.23491,0.06981,
-0.02972,1.23563,0.07368,
-0.06294,1.26572,0.06097,
-0.06076,1.25889,0.06133,
-0.05919,1.26579,0.06679,
-0.05919,1.26579,0.06679,
-0.06076,1.25889,0.06133,
-0.05705,1.25905,0.06705,
-0.06076,1.25889,0.06133,
-0.05795,1.25184,0.06138,
-0.05705,1.25905,0.06705,
-0.05705,1.25905,0.06705,
-0.05795,1.25184,0.06138,
-0.05434,1.25252,0.06685,
-0.00879,1.26171,0.09069,
-0.01638,1.25964,0.08754,
-0.0076,1.25064,0.088,
-0.0076,1.25064,0.088,
-0.01638,1.25964,0.08754,
-0.01459,1.25007,0.08622,
0,1.21802,0.06771,
-0.00641,1.21884,0.06702,
0,1.21473,0.06443,
0,1.21473,0.06443,
-0.00641,1.21884,0.06702,
-0.0061,1.21616,0.06352,
-0.00648,1.21591,0.05586,
0,1.21446,0.05669,
-0.0061,1.21616,0.06352,
-0.0061,1.21616,0.06352,
0,1.21446,0.05669,
0,1.21473,0.06443,
-0.01466,1.21907,0.0448,
-0.00723,1.21695,0.04555,
-0.01261,1.21771,0.05502,
-0.01261,1.21771,0.05502,
-0.00723,1.21695,0.04555,
-0.00648,1.21591,0.05586,
-0.00675,1.23669,0.08119,
-0.01315,1.23656,0.08006,
-0.00625,1.22817,0.07456,
-0.00625,1.22817,0.07456,
-0.01315,1.23656,0.08006,
-0.0123,1.22821,0.07384,
-0.00689,1.24354,0.08499,
-0.01386,1.2431,0.08354,
-0.00675,1.23669,0.08119,
-0.00675,1.23669,0.08119,
-0.01386,1.2431,0.08354,
-0.01315,1.23656,0.08006,
-0.00641,1.21884,0.06702,
0,1.21802,0.06771,
-0.00625,1.22817,0.07456,
-0.00625,1.22817,0.07456,
0,1.21802,0.06771,
0,1.22807,0.07536,
0,1.21761,0.03547,
-0.01477,1.22046,0.03429,
-9.54E-09,1.21815,0.02846,
-9.54E-09,1.21815,0.02846,
-0.01477,1.22046,0.03429,
-0.01478,1.22193,0.02696,
-0.0076,1.25064,0.088,
-0.01459,1.25007,0.08622,
-0.00689,1.24354,0.08499,
-0.00689,1.24354,0.08499,
-0.01459,1.25007,0.08622,
-0.01386,1.2431,0.08354,
0,1.30203,0.08279,
-0.00805,1.30202,0.08029,
0,1.29425,0.08259,
0,1.29425,0.08259,
-0.00805,1.30202,0.08029,
-0.00832,1.29269,0.08068,
0,1.27174,0.09502,
-0.00949,1.26841,0.09069,
0,1.26362,0.09341,
0,1.26362,0.09341,
-0.00949,1.26841,0.09069,
-0.00879,1.26171,0.09069,
0,1.23682,0.08214,
-0.00675,1.23669,0.08119,
0,1.22807,0.07536,
0,1.22807,0.07536,
-0.00675,1.23669,0.08119,
-0.00625,1.22817,0.07456,
0,1.24385,0.08592,
-0.00689,1.24354,0.08499,
0,1.23682,0.08214,
0,1.23682,0.08214,
-0.00689,1.24354,0.08499,
-0.00675,1.23669,0.08119,
0,1.27583,0.08966,
-0.00942,1.27385,0.08785,
0,1.27174,0.09502,
0,1.27174,0.09502,
-0.00942,1.27385,0.08785,
-0.00949,1.26841,0.09069,
0,1.32051,0.08428,
0,1.33386,0.08582,
-0.00931,1.32109,0.08153,
-0.00931,1.32109,0.08153,
0,1.33386,0.08582,
-0.0113,1.33435,0.08343,
0,1.37428,0.08327,
0,1.39425,0.07381,
-0.01352,1.37449,0.0805,
-0.01352,1.37449,0.0805,
0,1.39425,0.07381,
-0.01383,1.39434,0.07041,
0,1.3588,0.08654,
0,1.37428,0.08327,
-0.01254,1.35949,0.0839,
-0.01254,1.35949,0.0839,
0,1.37428,0.08327,
-0.01352,1.37449,0.0805,
0,1.33386,0.08582,
0,1.34487,0.08687,
-0.0113,1.33435,0.08343,
-0.0113,1.33435,0.08343,
0,1.34487,0.08687,
-0.0119,1.34577,0.08461,
0,1.29425,0.08259,
-0.00832,1.29269,0.08068,
0,1.2891,0.08302,
0,1.2891,0.08302,
-0.00832,1.29269,0.08068,
-0.0086,1.28722,0.08112,
0,1.34487,0.08687,
0,1.3588,0.08654,
-0.0119,1.34577,0.08461,
-0.0119,1.34577,0.08461,
0,1.3588,0.08654,
-0.01254,1.35949,0.0839,
0,1.25112,0.08923,
-0.0076,1.25064,0.088,
0,1.24385,0.08592,
0,1.24385,0.08592,
-0.0076,1.25064,0.088,
-0.00689,1.24354,0.08499,
0,1.30796,0.08302,
0,1.32051,0.08428,
-0.00789,1.31027,0.08043,
-0.00789,1.31027,0.08043,
0,1.32051,0.08428,
-0.00931,1.32109,0.08153,
-0.03228,1.34693,0.07827,
-0.01861,1.34091,0.08232,
-0.03188,1.35226,0.07853,
-0.03188,1.35226,0.07853,
-0.01861,1.34091,0.08232,
-0.0119,1.34577,0.08461,
-0.01638,1.25964,0.08754,
-0.00879,1.26171,0.09069,
-0.0179,1.26613,0.08729,
-0.0179,1.26613,0.08729,
-0.00879,1.26171,0.09069,
-0.00949,1.26841,0.09069,
-0.04535,1.3333,0.0733,
-0.05697,1.33119,0.0684,
-0.04407,1.32169,0.07304,
-0.04407,1.32169,0.07304,
-0.05697,1.33119,0.0684,
-0.0553,1.32054,0.06849,
-0.02113,1.28909,0.07723,
-0.03165,1.28557,0.07493,
-0.02062,1.28425,0.07855,
-0.02062,1.28425,0.07855,
-0.03165,1.28557,0.07493,
-0.03143,1.28117,0.07577,
-0.07847,1.32401,0.02407,
-0.07878,1.32355,0.01812,
-0.07697,1.31061,0.02391,
-0.07697,1.31061,0.02391,
-0.07878,1.32355,0.01812,
-0.0777,1.31122,0.01351,
-0.05876,1.35078,0.06779,
-0.04625,1.35276,0.07318,
-0.05924,1.35889,0.06756,
-0.05924,1.35889,0.06756,
-0.04625,1.35276,0.07318,
-0.04587,1.36008,0.07223,
-0.04625,1.35276,0.07318,
-0.03188,1.35226,0.07853,
-0.04587,1.36008,0.07223,
-0.04587,1.36008,0.07223,
-0.03188,1.35226,0.07853,
-0.03139,1.3603,0.07762,
-0.03139,1.3603,0.07762,
-0.03188,1.35226,0.07853,
-0.01254,1.35949,0.0839,
-0.01254,1.35949,0.0839,
-0.03188,1.35226,0.07853,
-0.0119,1.34577,0.08461,
-0.07308,1.334,0.0574,
-0.06715,1.34033,0.06291,
-0.07509,1.33874,0.05462,
-0.07509,1.33874,0.05462,
-0.06715,1.34033,0.06291,
-0.06822,1.34604,0.06187,
-0.05849,1.34513,0.06759,
-0.04611,1.34741,0.07345,
-0.05876,1.35078,0.06779,
-0.05876,1.35078,0.06779,
-0.04611,1.34741,0.07345,
-0.04625,1.35276,0.07318,
-0.03228,1.34693,0.07827,
-0.03188,1.35226,0.07853,
-0.04611,1.34741,0.07345,
-0.04611,1.34741,0.07345,
-0.03188,1.35226,0.07853,
-0.04625,1.35276,0.07318,
-0.04535,1.3333,0.0733,
-0.04407,1.32169,0.07304,
-0.0331,1.33154,0.07653,
-0.0331,1.33154,0.07653,
-0.04407,1.32169,0.07304,
-0.03247,1.32076,0.0765,
-0.07028,1.32903,0.06039,
-0.06566,1.33482,0.06396,
-0.07308,1.334,0.0574,
-0.07308,1.334,0.0574,
-0.06566,1.33482,0.06396,
-0.06715,1.34033,0.06291,
-0.06632,1.32308,0.0629,
-0.06281,1.32678,0.0652,
-0.07028,1.32903,0.06039,
-0.07028,1.32903,0.06039,
-0.06281,1.32678,0.0652,
-0.06566,1.33482,0.06396,
-0.05697,1.33119,0.0684,
-0.04535,1.3333,0.0733,
-0.05815,1.33938,0.06792,
-0.05815,1.33938,0.06792,
-0.04535,1.3333,0.0733,
-0.04568,1.34193,0.07366,
-0.07096,1.29082,0.0514,
-0.06858,1.27965,0.0514,
-0.06782,1.29159,0.05893,
-0.06782,1.29159,0.05893,
-0.06858,1.27965,0.0514,
-0.06572,1.2798,0.06003,
-0.0667,1.27327,0.05123,
-0.06464,1.26573,0.05144,
-0.06462,1.27315,0.06051,
-0.06462,1.27315,0.06051,
-0.06464,1.26573,0.05144,
-0.06294,1.26572,0.06097,
-0.06464,1.26573,0.05144,
-0.06246,1.25878,0.0519,
-0.06294,1.26572,0.06097,
-0.06294,1.26572,0.06097,
-0.06246,1.25878,0.0519,
-0.06076,1.25889,0.06133,
-0.06246,1.25878,0.0519,
-0.0591,1.25166,0.05157,
-0.06076,1.25889,0.06133,
-0.06076,1.25889,0.06133,
-0.0591,1.25166,0.05157,
-0.05795,1.25184,0.06138,
-0.0414,1.23409,0.06121,
-0.04773,1.23921,0.06125,
-0.04179,1.234,0.05217,
-0.04179,1.234,0.05217,
-0.04773,1.23921,0.06125,
-0.04843,1.23908,0.05175,
-0.04179,1.234,0.05217,
-0.0333,1.22787,0.05316,
-0.0414,1.23409,0.06121,
-0.0414,1.23409,0.06121,
-0.0333,1.22787,0.05316,
-0.03282,1.22791,0.0613,
-0.05024,1.24555,0.06635,
-0.05434,1.25252,0.06685,
-0.05295,1.24491,0.06126,
-0.05295,1.24491,0.06126,
-0.05434,1.25252,0.06685,
-0.05795,1.25184,0.06138,
-0.05295,1.24491,0.06126,
-0.04773,1.23921,0.06125,
-0.05024,1.24555,0.06635,
-0.05024,1.24555,0.06635,
-0.04773,1.23921,0.06125,
-0.04519,1.23945,0.06554,
-0.0414,1.23409,0.06121,
-0.03282,1.22791,0.0613,
-0.03978,1.23434,0.06523,
-0.03978,1.23434,0.06523,
-0.03282,1.22791,0.0613,
-0.03093,1.22799,0.06463,
-0.04773,1.23921,0.06125,
-0.0414,1.23409,0.06121,
-0.04519,1.23945,0.06554,
-0.04519,1.23945,0.06554,
-0.0414,1.23409,0.06121,
-0.03978,1.23434,0.06523,
-0.0579,1.28663,0.06556,
-0.05984,1.28019,0.06589,
-0.05192,1.28491,0.06885,
-0.05192,1.28491,0.06885,
-0.05984,1.28019,0.06589,
-0.05196,1.27963,0.0702,
-0.05984,1.28019,0.06589,
-0.06019,1.27317,0.06641,
-0.05196,1.27963,0.0702,
-0.05196,1.27963,0.0702,
-0.06019,1.27317,0.06641,
-0.05151,1.27351,0.07187,
-0.05076,1.26639,0.07326,
-0.05151,1.27351,0.07187,
-0.05919,1.26579,0.06679,
-0.05919,1.26579,0.06679,
-0.05151,1.27351,0.07187,
-0.06019,1.27317,0.06641,
-0.07023,1.27958,0.03767,
-0.06571,1.26606,0.03842,
-0.0667,1.27327,0.05123,
-0.0667,1.27327,0.05123,
-0.06571,1.26606,0.03842,
-0.06464,1.26573,0.05144,
-0.06462,1.27315,0.06051,
-0.06572,1.2798,0.06003,
-0.0667,1.27327,0.05123,
-0.0667,1.27327,0.05123,
-0.06572,1.2798,0.06003,
-0.06858,1.27965,0.0514,
-0.06962,1.3,0.05828,
-0.06782,1.29159,0.05893,
-0.06398,1.3,0.06301,
-0.06398,1.3,0.06301,
-0.06782,1.29159,0.05893,
-0.06242,1.29301,0.06351,
-0.06977,1.30839,0.05889,
-0.06962,1.3,0.05828,
-0.06391,1.30679,0.06338,
-0.06391,1.30679,0.06338,
-0.06962,1.3,0.05828,
-0.06398,1.3,0.06301,
-0.06885,1.31663,0.06067,
-0.06977,1.30839,0.05889,
-0.06239,1.31339,0.06486,
-0.06239,1.31339,0.06486,
-0.06977,1.30839,0.05889,
-0.06391,1.30679,0.06338,
-0.06822,1.34604,0.06187,
-0.05876,1.35078,0.06779,
-0.07054,1.35545,0.05999,
-0.07054,1.35545,0.05999,
-0.05876,1.35078,0.06779,
-0.05924,1.35889,0.06756,
-0.05876,1.35078,0.06779,
-0.06822,1.34604,0.06187,
-0.05849,1.34513,0.06759,
-0.05849,1.34513,0.06759,
-0.06822,1.34604,0.06187,
-0.06715,1.34033,0.06291,
-0.05849,1.34513,0.06759,
-0.06715,1.34033,0.06291,
-0.05815,1.33938,0.06792,
-0.05815,1.33938,0.06792,
-0.06715,1.34033,0.06291,
-0.06566,1.33482,0.06396,
-0.05815,1.33938,0.06792,
-0.06566,1.33482,0.06396,
-0.05697,1.33119,0.0684,
-0.05697,1.33119,0.0684,
-0.06566,1.33482,0.06396,
-0.06281,1.32678,0.0652,
-0.04407,1.32169,0.07304,
-0.04321,1.30286,0.07431,
-0.03247,1.32076,0.0765,
-0.03247,1.32076,0.0765,
-0.04321,1.30286,0.07431,
-0.03212,1.3029,0.07701,
-0.03247,1.32076,0.0765,
-0.03212,1.3029,0.07701,
-0.02173,1.31709,0.07831,
-0.02173,1.31709,0.07831,
-0.03212,1.3029,0.07701,
-0.02172,1.30279,0.07802,
-0.02173,1.31709,0.07831,
-0.02172,1.30279,0.07802,
-0.01438,1.31283,0.07895,
-0.01438,1.31283,0.07895,
-0.02172,1.30279,0.07802,
-0.01492,1.3025,0.0785,
-0.01438,1.31283,0.07895,
-0.01492,1.3025,0.0785,
-0.00789,1.31027,0.08043,
-0.00789,1.31027,0.08043,
-0.01492,1.3025,0.0785,
-0.00805,1.30202,0.08029,
-0.00789,1.31027,0.08043,
-0.00805,1.30202,0.08029,
0,1.30796,0.08302,
0,1.30796,0.08302,
-0.00805,1.30202,0.08029,
0,1.30203,0.08279,
-0.04222,1.28399,0.07212,
-0.04321,1.30286,0.07431,
-0.05192,1.28491,0.06885,
-0.05192,1.28491,0.06885,
-0.04321,1.30286,0.07431,
-0.05322,1.30294,0.07019,
-0.01438,1.31283,0.07895,
-0.00931,1.32109,0.08153,
-0.02173,1.31709,0.07831,
-0.02173,1.31709,0.07831,
-0.00931,1.32109,0.08153,
-0.02158,1.32687,0.07898,
-0.04568,1.34193,0.07366,
-0.04535,1.3333,0.0733,
-0.03255,1.34161,0.07752,
-0.03255,1.34161,0.07752,
-0.04535,1.3333,0.0733,
-0.0331,1.33154,0.07653,
-0.06962,1.3,0.05828,
-0.06977,1.30839,0.05889,
-0.07238,1.30016,0.05146,
-0.07238,1.30016,0.05146,
-0.06977,1.30839,0.05889,
-0.0731,1.30975,0.0524,
-0.07238,1.30016,0.05146,
-0.07096,1.29082,0.0514,
-0.06962,1.3,0.05828,
-0.06962,1.3,0.05828,
-0.07096,1.29082,0.0514,
-0.06782,1.29159,0.05893,
-0.05815,1.33938,0.06792,
-0.04568,1.34193,0.07366,
-0.05849,1.34513,0.06759,
-0.05849,1.34513,0.06759,
-0.04568,1.34193,0.07366,
-0.04611,1.34741,0.07345,
-0.04652,1.37396,0.06917,
-0.0591,1.37122,0.06388,
-0.04587,1.36008,0.07223,
-0.04587,1.36008,0.07223,
-0.0591,1.37122,0.06388,
-0.05924,1.35889,0.06756,
-0.0591,1.37122,0.06388,
-0.04652,1.37396,0.06917,
-0.0588,1.38544,0.05528,
-0.0588,1.38544,0.05528,
-0.04652,1.37396,0.06917,
-0.04606,1.38991,0.06157,
-9.54E-09,1.21815,0.02846,
-0.01478,1.22193,0.02696,
-4.77E-09,1.21742,0.02325,
-4.77E-09,1.21742,0.02325,
-0.01478,1.22193,0.02696,
-0.01468,1.22309,0.01994,
-0.02721,1.23366,-0.00837,
-0.03316,1.23991,-0.01178,
-0.02003,1.23105,-0.02186,
-0.02003,1.23105,-0.02186,
-0.03316,1.23991,-0.01178,
-0.0228,1.23744,-0.02778,
-0.02003,1.23105,-0.02186,
-0.0228,1.23744,-0.02778,
0,1.22734,-0.03069,
0,1.22734,-0.03069,
-0.0228,1.23744,-0.02778,
0,1.23344,-0.03937,
-0.07549,1.35591,0.04042,
-0.0794,1.3395,0.02451,
-0.07662,1.34467,0.04784,
-0.07662,1.34467,0.04784,
-0.0794,1.3395,0.02451,
-0.07767,1.32552,0.03517,
-0.07662,1.34467,0.04784,
-0.07054,1.35545,0.05999,
-0.07549,1.35591,0.04042,
-0.07549,1.35591,0.04042,
-0.07054,1.35545,0.05999,
-0.06907,1.36504,0.05375,
-0.07594,1.31054,0.03424,
-0.07447,1.31052,0.04458,
-0.07767,1.32552,0.03517,
-0.07767,1.32552,0.03517,
-0.07447,1.31052,0.04458,
-0.07591,1.32547,0.04546,
-0.07767,1.32552,0.03517,
-0.07847,1.32401,0.02407,
-0.07594,1.31054,0.03424,
-0.07594,1.31054,0.03424,
-0.07847,1.32401,0.02407,
-0.07697,1.31061,0.02391,
-0.07385,1.29993,0.03892,
-0.07594,1.31054,0.03424,
-0.07524,1.29924,0.02406,
-0.07524,1.29924,0.02406,
-0.07594,1.31054,0.03424,
-0.07697,1.31061,0.02391,
-0.0591,1.25166,0.05157,
-0.06246,1.25878,0.0519,
-0.0595,1.25156,0.0393,
-0.0595,1.25156,0.0393,
-0.06246,1.25878,0.0519,
-0.06331,1.25887,0.03872,
-0.05363,1.24466,0.05153,
-0.0591,1.25166,0.05157,
-0.05402,1.24445,0.04027,
-0.05402,1.24445,0.04027,
-0.0591,1.25166,0.05157,
-0.0595,1.25156,0.0393,
-0.04843,1.23908,0.05175,
-0.05363,1.24466,0.05153,
-0.04878,1.23893,0.04124,
-0.04878,1.23893,0.04124,
-0.05363,1.24466,0.05153,
-0.05402,1.24445,0.04027,
-0.04179,1.234,0.05217,
-0.04843,1.23908,0.05175,
-0.04194,1.23381,0.04234,
-0.04194,1.23381,0.04234,
-0.04843,1.23908,0.05175,
-0.04878,1.23893,0.04124,
-0.0333,1.22787,0.05316,
-0.04179,1.234,0.05217,
-0.03257,1.22766,0.04363,
-0.03257,1.22766,0.04363,
-0.04179,1.234,0.05217,
-0.04194,1.23381,0.04234,
-0.02252,1.2218,0.05414,
-0.0333,1.22787,0.05316,
-0.02345,1.22239,0.04425,
-0.02345,1.22239,0.04425,
-0.0333,1.22787,0.05316,
-0.03257,1.22766,0.04363,
0,1.21446,0.05669,
-0.00648,1.21591,0.05586,
0,1.21552,0.04637,
0,1.21552,0.04637,
-0.00648,1.21591,0.05586,
-0.00723,1.21695,0.04555,
-0.0536,1.24468,0.02831,
-0.05957,1.25153,0.02703,
-0.05134,1.24564,0.01876,
-0.05134,1.24564,0.01876,
-0.05957,1.25153,0.02703,
-0.05723,1.2522,0.01787,
-0.01477,1.22046,0.03429,
-0.03154,1.22797,0.03229,
-0.01478,1.22193,0.02696,
-0.01478,1.22193,0.02696,
-0.03154,1.22797,0.03229,
-0.02885,1.22882,0.02475,
-0.02579,1.22936,0.01621,
-0.02885,1.22882,0.02475,
-0.03478,1.23507,0.01392,
-0.03478,1.23507,0.01392,
-0.02885,1.22882,0.02475,
-0.03809,1.23448,0.02291,
-0.04196,1.24137,0.01174,
-0.03478,1.23507,0.01392,
-0.04519,1.2403,0.02048,
-0.04519,1.2403,0.02048,
-0.03478,1.23507,0.01392,
-0.03809,1.23448,0.02291,
-0.04196,1.24137,0.01174,
-0.04519,1.2403,0.02048,
-0.04692,1.24657,0.00995,
-0.04692,1.24657,0.00995,
-0.04519,1.2403,0.02048,
-0.05134,1.24564,0.01876,
-0.02721,1.23366,-0.00837,
-0.03204,1.23539,0.0016,
-0.03316,1.23991,-0.01178,
-0.03316,1.23991,-0.01178,
-0.03204,1.23539,0.0016,
-0.03862,1.24182,-0.00035,
-0.01478,1.22193,0.02696,
-0.02885,1.22882,0.02475,
-0.01468,1.22309,0.01994,
-0.01468,1.22309,0.01994,
-0.02885,1.22882,0.02475,
-0.02579,1.22936,0.01621,
-0.06318,1.25883,0.02643,
-0.06646,1.26648,0.02626,
-0.06332,1.25867,0.01678,
-0.06332,1.25867,0.01678,
-0.06646,1.26648,0.02626,
-0.06724,1.26686,0.01577,
-0.06646,1.26648,0.02626,
-0.07095,1.27964,0.02588,
-0.06724,1.26686,0.01577,
-0.06724,1.26686,0.01577,
-0.07095,1.27964,0.02588,
-0.07177,1.28009,0.01507,
-0.07095,1.27964,0.02588,
-0.0735,1.28922,0.02501,
-0.07177,1.28009,0.01507,
-0.07177,1.28009,0.01507,
-0.0735,1.28922,0.02501,
-0.07406,1.28893,0.01498,
-0.07524,1.29924,0.02406,
-0.07697,1.31061,0.02391,
-0.07573,1.29881,0.01441,
-0.07573,1.29881,0.01441,
-0.07697,1.31061,0.02391,
-0.0777,1.31122,0.01351,
0.0591,1.37122,0.06388,
0.05924,1.35889,0.06756,
0.07054,1.35545,0.05999,
0.02503,1.23017,0.00403,
0.02579,1.22936,0.01621,
0.01468,1.22309,0.01994,
0.0465,1.25056,-0.00266,
0.04692,1.24657,0.00995,
0.03862,1.24182,-0.00035,
0.04692,1.24657,0.00995,
0.0465,1.25056,-0.00266,
0.05342,1.25298,0.00928,
0.03257,1.22766,0.04363,
0.02345,1.22239,0.04425,
0.03154,1.22797,0.03229,
0.01268,1.2208,0.0666,
0.02368,1.22846,0.07047,
0.0123,1.22821,0.07384,
0.02368,1.22846,0.07047,
0.02201,1.22309,0.06484,
0.02894,1.22811,0.0672,
0.02894,1.22811,0.0672,
0.02201,1.22309,0.06484,
0.03093,1.22799,0.06463,
0.0465,1.25056,-0.00266,
0.06285,1.26415,-0.00309,
0.05342,1.25298,0.00928,
0.07238,1.30016,0.05146,
0.07447,1.31052,0.04458,
0.0731,1.30975,0.0524,
0.06792,1.26025,0.00094,
0.0767,1.26597,-0.00501,
0.07484,1.2602,0.00476,
0.07484,1.2602,0.00476,
0.0767,1.26597,-0.00501,
0.08302,1.26481,-0.00194,
0.08186,1.2745,-0.00958,
0.08692,1.28517,-0.01328,
0.09014,1.27388,-0.0067,
0.09014,1.27388,-0.0067,
0.08692,1.28517,-0.01328,
0.09604,1.28501,-0.00966,
0.06332,1.25867,0.01678,
0.06045,1.25965,0.00845,
0.07484,1.2602,0.00476,
0.07484,1.2602,0.00476,
0.06045,1.25965,0.00845,
0.06792,1.26025,0.00094,
0.09742,1.31077,-0.00592,
0.09808,1.298,-0.00908,
0.09008,1.31005,-0.01032,
0.09008,1.31005,-0.01032,
0.09808,1.298,-0.00908,
0.09021,1.29688,-0.01302,
0.08611,1.31463,-0.00524,
0.07816,1.31599,0.00132,
0.08818,1.31783,0.00459,
0.08611,1.31463,-0.00524,
0.08818,1.31783,0.00459,
0.09493,1.31596,-0.00147,
0.02235,1.22209,0.06108,
0.02201,1.22309,0.06484,
0.0124,1.21815,0.06233,
0.0124,1.21815,0.06233,
0.02201,1.22309,0.06484,
0.01268,1.2208,0.0666,
0.0767,1.26597,-0.00501,
0.08186,1.2745,-0.00958,
0.08302,1.26481,-0.00194,
0.08302,1.26481,-0.00194,
0.08186,1.2745,-0.00958,
0.09014,1.27388,-0.0067,
0.02113,1.28909,0.07723,
0.01492,1.3025,0.0785,
0.00832,1.29269,0.08068,
0.00832,1.29269,0.08068,
0.01492,1.3025,0.0785,
0.00805,1.30202,0.08029,
0.06239,1.31339,0.06486,
0.05322,1.30294,0.07019,
0.06391,1.30679,0.06338,
0.02172,1.30279,0.07802,
0.01492,1.3025,0.0785,
0.02113,1.28909,0.07723,
0.0579,1.28663,0.06556,
0.05322,1.30294,0.07019,
0.05192,1.28491,0.06885,
0.07198,1.32098,0.05679,
0.06885,1.31663,0.06067,
0.0731,1.30975,0.0524,
0.0731,1.30975,0.0524,
0.06885,1.31663,0.06067,
0.06977,1.30839,0.05889,
0.09808,1.298,-0.00908,
0.09604,1.28501,-0.00966,
0.09021,1.29688,-0.01302,
0.09021,1.29688,-0.01302,
0.09604,1.28501,-0.00966,
0.08692,1.28517,-0.01328,
0.08611,1.31463,-0.00524,
0.09493,1.31596,-0.00147,
0.09008,1.31005,-0.01032,
0.09008,1.31005,-0.01032,
0.09493,1.31596,-0.00147,
0.09742,1.31077,-0.00592,
0.05342,1.25298,0.00928,
0.06045,1.25965,0.00845,
0.0609,1.25604,0.01715,
0.0609,1.25604,0.01715,
0.06045,1.25965,0.00845,
0.06332,1.25867,0.01678,
0.02972,1.23563,0.07368,
0.02085,1.23626,0.07781,
0.02368,1.22846,0.07047,
0.02368,1.22846,0.07047,
0.02085,1.23626,0.07781,
0.0123,1.22821,0.07384,
0.00879,1.26171,0.09069,
0,1.25884,0.09174,
0.0076,1.25064,0.088,
0.0076,1.25064,0.088,
0,1.25884,0.09174,
0,1.25112,0.08923,
0,1.21761,0.03547,
0.00723,1.21695,0.04555,
0,1.21552,0.04637,
0.07023,1.27958,0.03767,
0.06858,1.27965,0.0514,
0.0667,1.27327,0.05123,
0.01315,1.23656,0.08006,
0.0123,1.22821,0.07384,
0.02085,1.23626,0.07781,
0.05723,1.2522,0.01787,
0.05342,1.25298,0.00928,
0.0609,1.25604,0.01715,
0.06045,1.25965,0.00845,
0.06285,1.26415,-0.00309,
0.07262,1.27969,-0.00539,
0.0777,1.31122,0.01351,
0.07573,1.29881,0.01441,
0.08438,1.28612,0.00471,
0.09008,1.31005,-0.01032,
0.07882,1.29978,-0.00559,
0.08611,1.31463,-0.00524,
0.08438,1.28612,0.00471,
0.09014,1.27388,-0.0067,
0.09604,1.28501,-0.00966,
0.06724,1.26686,0.01577,
0.06332,1.25867,0.01678,
0.07484,1.2602,0.00476,
0.08438,1.28612,0.00471,
0.09808,1.298,-0.00908,
0.09742,1.31077,-0.00592,
0.07484,1.2602,0.00476,
0.08302,1.26481,-0.00194,
0.08438,1.28612,0.00471,
0.08438,1.28612,0.00471,
0.09493,1.31596,-0.00147,
0.08818,1.31783,0.00459,
0.07262,1.27969,-0.00539,
0.06792,1.26025,0.00094,
0.06045,1.25965,0.00845,
0.08692,1.28517,-0.01328,
0.08186,1.2745,-0.00958,
0.07262,1.27969,-0.00539,
0.0767,1.26597,-0.00501,
0.06792,1.26025,0.00094,
0.07262,1.27969,-0.00539,
0.08692,1.28517,-0.01328,
0.07262,1.27969,-0.00539,
0.09021,1.29688,-0.01302,
0.08438,1.28612,0.00471,
0.08818,1.31783,0.00459,
0.0777,1.31122,0.01351,
0.08818,1.31783,0.00459,
0.07816,1.31599,0.00132,
0.0777,1.31122,0.01351,
0.07262,1.27969,-0.00539,
0.07882,1.29978,-0.00559,
0.09008,1.31005,-0.01032,
0.08186,1.2745,-0.00958,
0.0767,1.26597,-0.00501,
0.07262,1.27969,-0.00539,
0.08438,1.28612,0.00471,
0.08302,1.26481,-0.00194,
0.09014,1.27388,-0.0067,
0.08438,1.28612,0.00471,
0.07573,1.29881,0.01441,
0.07406,1.28893,0.01498,
0.08438,1.28612,0.00471,
0.06724,1.26686,0.01577,
0.07484,1.2602,0.00476,
0.08438,1.28612,0.00471,
0.07406,1.28893,0.01498,
0.07177,1.28009,0.01507,
0.08438,1.28612,0.00471,
0.09604,1.28501,-0.00966,
0.09808,1.298,-0.00908,
0.07262,1.27969,-0.00539,
0.09008,1.31005,-0.01032,
0.09021,1.29688,-0.01302,
0.08438,1.28612,0.00471,
0.09742,1.31077,-0.00592,
0.09493,1.31596,-0.00147,
0.07882,1.29978,-0.00559,
0.07816,1.31599,0.00132,
0.08611,1.31463,-0.00524,
0.08438,1.28612,0.00471,
0.07177,1.28009,0.01507,
0.06724,1.26686,0.01577,
0,1.26362,0.09341,
0,1.25884,0.09174,
0.00879,1.26171,0.09069,
0.00931,1.32109,0.08153,
0.00789,1.31027,0.08043,
0.01438,1.31283,0.07895,
0.07198,1.32098,0.05679,
0.07426,1.32357,0.05277,
0.07308,1.334,0.0574,
0.06885,1.31663,0.06067,
0.07198,1.32098,0.05679,
0.06632,1.32308,0.0629,
0.06632,1.32308,0.0629,
0.07198,1.32098,0.05679,
0.07028,1.32903,0.06039,
0.07509,1.33874,0.05462,
0.07662,1.34467,0.04784,
0.07054,1.35545,0.05999,
0.0215,1.33679,0.08078,
0.0113,1.33435,0.08343,
0.00931,1.32109,0.08153,
0.03228,1.34693,0.07827,
0.01861,1.34091,0.08232,
0.03255,1.34161,0.07752,
0.0215,1.33679,0.08078,
0.01861,1.34091,0.08232,
0.0113,1.33435,0.08343,
0.03255,1.34161,0.07752,
0.02158,1.32687,0.07898,
0.0331,1.33154,0.07653,
0.07198,1.32098,0.05679,
0.07308,1.334,0.0574,
0.07028,1.32903,0.06039,
0.07426,1.32357,0.05277,
0.07447,1.31052,0.04458,
0.07591,1.32547,0.04546,
0.07308,1.334,0.0574,
0.07426,1.32357,0.05277,
0.07509,1.33874,0.05462,
0.03282,1.22791,0.0613,
0.03093,1.22799,0.06463,
0.02235,1.22209,0.06108,
0.02235,1.22209,0.06108,
0.03093,1.22799,0.06463,
0.02201,1.22309,0.06484,
0.0731,1.30975,0.0524,
0.07426,1.32357,0.05277,
0.07198,1.32098,0.05679,
0.0579,1.28663,0.06556,
0.05984,1.28019,0.06589,
0.06362,1.28358,0.06286,
0.0579,1.28663,0.06556,
0.06362,1.28358,0.06286,
0.06242,1.29301,0.06351,
0.06242,1.29301,0.06351,
0.06362,1.28358,0.06286,
0.06782,1.29159,0.05893,
0.06362,1.28358,0.06286,
0.06572,1.2798,0.06003,
0.06782,1.29159,0.05893,
0.05322,1.30294,0.07019,
0.06242,1.29301,0.06351,
0.06398,1.3,0.06301,
0.05322,1.30294,0.07019,
0.06398,1.3,0.06301,
0.06391,1.30679,0.06338,
0.05322,1.30294,0.07019,
0.0579,1.28663,0.06556,
0.06242,1.29301,0.06351,
0.0553,1.32054,0.06849,
0.06632,1.32308,0.0629,
0.06281,1.32678,0.0652,
0.06239,1.31339,0.06486,
0.0553,1.32054,0.06849,
0.05322,1.30294,0.07019,
0.05697,1.33119,0.0684,
0.0553,1.32054,0.06849,
0.06281,1.32678,0.0652,
0.04321,1.30286,0.07431,
0.05322,1.30294,0.07019,
0.04407,1.32169,0.07304,
0.04407,1.32169,0.07304,
0.05322,1.30294,0.07019,
0.0553,1.32054,0.06849,
0.01861,1.34091,0.08232,
0.0119,1.34577,0.08461,
0.0113,1.33435,0.08343,
0.0215,1.33679,0.08078,
0.03255,1.34161,0.07752,
0.01861,1.34091,0.08232,
0.00931,1.32109,0.08153,
0.02158,1.32687,0.07898,
0.0215,1.33679,0.08078,
0.03255,1.34161,0.07752,
0.0215,1.33679,0.08078,
0.02158,1.32687,0.07898,
0.07447,1.31052,0.04458,
0.07426,1.32357,0.05277,
0.0731,1.30975,0.0524,
0.07385,1.29993,0.03892,
0.07447,1.31052,0.04458,
0.07238,1.30016,0.05146,
0.06239,1.31339,0.06486,
0.06885,1.31663,0.06067,
0.06632,1.32308,0.0629,
0.06239,1.31339,0.06486,
0.06632,1.32308,0.0629,
0.0553,1.32054,0.06849,
0.03204,1.23539,0.0016,
0.03862,1.24182,-0.00035,
0.04196,1.24137,0.01174,
0.07591,1.32547,0.04546,
0.07509,1.33874,0.05462,
0.07426,1.32357,0.05277,
0.07591,1.32547,0.04546,
0.07662,1.34467,0.04784,
0.07509,1.33874,0.05462,
0.07662,1.34467,0.04784,
0.07591,1.32547,0.04546,
0.07767,1.32552,0.03517,
0.07385,1.29993,0.03892,
0.07594,1.31054,0.03424,
0.07447,1.31052,0.04458,
0.07767,1.32552,0.03517,
0.07847,1.32401,0.02407,
0.0794,1.3395,0.02451,
0.07549,1.35591,0.04042,
0.06877,1.37605,0.04344,
0.06907,1.36504,0.05375,
0.07549,1.35591,0.04042,
0.07521,1.36243,0.03473,
0.06877,1.37605,0.04344,
0.01477,1.22046,0.03429,
0.02345,1.22239,0.04425,
0.01466,1.21907,0.0448,
0.01477,1.22046,0.03429,
0.01466,1.21907,0.0448,
0.00723,1.21695,0.04555,
0.02503,1.23017,0.00403,
0.03204,1.23539,0.0016,
0.03478,1.23507,0.01392,
0.02721,1.23366,-0.00837,
0.03204,1.23539,0.0016,
0.02503,1.23017,0.00403,
0.03316,1.23991,-0.01178,
0.0465,1.25056,-0.00266,
0.03862,1.24182,-0.00035,
0.01477,1.22046,0.03429,
0.00723,1.21695,0.04555,
0,1.21761,0.03547,
0.03154,1.22797,0.03229,
0.02345,1.22239,0.04425,
0.01477,1.22046,0.03429,
0.02503,1.23017,0.00403,
0.03478,1.23507,0.01392,
0.02579,1.22936,0.01621,
0.03204,1.23539,0.0016,
0.04196,1.24137,0.01174,
0.03478,1.23507,0.01392,
0.03862,1.24182,-0.00035,
0.04692,1.24657,0.00995,
0.04196,1.24137,0.01174,
0.05342,1.25298,0.00928,
0.06285,1.26415,-0.00309,
0.06045,1.25965,0.00845,
0.0465,1.25056,-0.00266,
0.03316,1.23991,-0.01178,
0.0434,1.24935,-0.0162,
0.0434,1.24935,-0.0162,
0.06285,1.26415,-0.00309,
0.0465,1.25056,-0.00266,
0.03316,1.23991,-0.01178,
0.0228,1.23744,-0.02778,
0.0434,1.24935,-0.0162,
0.06362,1.28358,0.06286,
0.05984,1.28019,0.06589,
0.06572,1.2798,0.06003,
0.07509,1.33874,0.05462,
0.07054,1.35545,0.05999,
0.06822,1.34604,0.06187,
0.0591,1.37122,0.06388,
0.07054,1.35545,0.05999,
0.06907,1.36504,0.05375,
0.0609,1.25604,0.01715,
0.06332,1.25867,0.01678,
0.06318,1.25883,0.02643,
0.07878,1.32355,0.01812,
0.0794,1.3395,0.02451,
0.07847,1.32401,0.02407,
0.0794,1.3395,0.02451,
0.07521,1.36243,0.03473,
0.07549,1.35591,0.04042,
0.01268,1.2208,0.0666,
0.02201,1.22309,0.06484,
0.02368,1.22846,0.07047,
-0.0591,1.37122,0.06388,
-0.07054,1.35545,0.05999,
-0.05924,1.35889,0.06756,
-0.02503,1.23017,0.00403,
-0.01468,1.22309,0.01994,
-0.02579,1.22936,0.01621,
-0.0465,1.25056,-0.00266,
-0.03862,1.24182,-0.00035,
-0.04692,1.24657,0.00995,
-0.04692,1.24657,0.00995,
-0.05342,1.25298,0.00928,
-0.0465,1.25056,-0.00266,
-0.03257,1.22766,0.04363,
-0.03154,1.22797,0.03229,
-0.02345,1.22239,0.04425,
-0.01271,1.22073,0.0666,
-0.0123,1.22821,0.07384,
-0.02367,1.22848,0.07047,
-0.02367,1.22848,0.07047,
-0.02894,1.22811,0.0672,
-0.02201,1.22309,0.06484,
-0.02894,1.22811,0.0672,
-0.03093,1.22799,0.06463,
-0.02201,1.22309,0.06484,
-0.0465,1.25056,-0.00266,
-0.05342,1.25298,0.00928,
-0.06285,1.26415,-0.00309,
-0.07238,1.30016,0.05146,
-0.0731,1.30975,0.0524,
-0.07447,1.31052,0.04458,
-0.06792,1.26025,0.00094,
-0.07484,1.2602,0.00476,
-0.0767,1.26597,-0.00501,
-0.07484,1.2602,0.00476,
-0.08302,1.26481,-0.00194,
-0.0767,1.26597,-0.00501,
-0.08186,1.2745,-0.00958,
-0.09014,1.27388,-0.0067,
-0.08692,1.28517,-0.01328,
-0.09014,1.27388,-0.0067,
-0.09604,1.28501,-0.00966,
-0.08692,1.28517,-0.01328,
-0.06332,1.25867,0.01678,
-0.07484,1.2602,0.00476,
-0.06045,1.25965,0.00845,
-0.07484,1.2602,0.00476,
-0.06792,1.26025,0.00094,
-0.06045,1.25965,0.00845,
-0.09742,1.31077,-0.00592,
-0.09008,1.31005,-0.01032,
-0.09808,1.298,-0.00908,
-0.09008,1.31005,-0.01032,
-0.09021,1.29688,-0.01302,
-0.09808,1.298,-0.00908,
-0.08611,1.31463,-0.00524,
-0.08818,1.31783,0.00459,
-0.07816,1.31599,0.00132,
-0.08611,1.31463,-0.00524,
-0.09493,1.31596,-0.00147,
-0.08818,1.31783,0.00459,
-0.02235,1.22209,0.06108,
-0.01248,1.21797,0.06233,
-0.02201,1.22309,0.06484,
-0.01248,1.21797,0.06233,
-0.01271,1.22073,0.0666,
-0.02201,1.22309,0.06484,
-0.0767,1.26597,-0.00501,
-0.08302,1.26481,-0.00194,
-0.08186,1.2745,-0.00958,
-0.08302,1.26481,-0.00194,
-0.09014,1.27388,-0.0067,
-0.08186,1.2745,-0.00958,
-0.02113,1.28909,0.07723,
-0.00832,1.29269,0.08068,
-0.01492,1.3025,0.0785,
-0.00832,1.29269,0.08068,
-0.00805,1.30202,0.08029,
-0.01492,1.3025,0.0785,
-0.06239,1.31339,0.06486,
-0.06391,1.30679,0.06338,
-0.05322,1.30294,0.07019,
-0.02172,1.30279,0.07802,
-0.02113,1.28909,0.07723,
-0.01492,1.3025,0.0785,
-0.0579,1.28663,0.06556,
-0.05192,1.28491,0.06885,
-0.05322,1.30294,0.07019,
-0.07198,1.32098,0.05679,
-0.0731,1.30975,0.0524,
-0.06885,1.31663,0.06067,
-0.0731,1.30975,0.0524,
-0.06977,1.30839,0.05889,
-0.06885,1.31663,0.06067,
-0.09808,1.298,-0.00908,
-0.09021,1.29688,-0.01302,
-0.09604,1.28501,-0.00966,
-0.09021,1.29688,-0.01302,
-0.08692,1.28517,-0.01328,
-0.09604,1.28501,-0.00966,
-0.08611,1.31463,-0.00524,
-0.09008,1.31005,-0.01032,
-0.09493,1.31596,-0.00147,
-0.09008,1.31005,-0.01032,
-0.09742,1.31077,-0.00592,
-0.09493,1.31596,-0.00147,
-0.05342,1.25298,0.00928,
-0.0609,1.25604,0.01715,
-0.06045,1.25965,0.00845,
-0.0609,1.25604,0.01715,
-0.06332,1.25867,0.01678,
-0.06045,1.25965,0.00845,
-0.02972,1.23563,0.07368,
-0.02367,1.22848,0.07047,
-0.02085,1.23626,0.07781,
-0.02367,1.22848,0.07047,
-0.0123,1.22821,0.07384,
-0.02085,1.23626,0.07781,
-0.00879,1.26171,0.09069,
-0.0076,1.25064,0.088,
0,1.25884,0.09174,
-0.0076,1.25064,0.088,
0,1.25112,0.08923,
0,1.25884,0.09174,
0,1.21761,0.03547,
0,1.21552,0.04637,
-0.00723,1.21695,0.04555,
-0.07023,1.27958,0.03767,
-0.0667,1.27327,0.05123,
-0.06858,1.27965,0.0514,
-0.01315,1.23656,0.08006,
-0.02085,1.23626,0.07781,
-0.0123,1.22821,0.07384,
-0.05723,1.2522,0.01787,
-0.0609,1.25604,0.01715,
-0.05342,1.25298,0.00928,
-0.06045,1.25965,0.00845,
-0.07262,1.27969,-0.00539,
-0.06285,1.26415,-0.00309,
-0.0777,1.31122,0.01351,
-0.08438,1.28612,0.00471,
-0.07573,1.29881,0.01441,
-0.09008,1.31005,-0.01032,
-0.08611,1.31463,-0.00524,
-0.07882,1.29978,-0.00559,
-0.08438,1.28612,0.00471,
-0.09604,1.28501,-0.00966,
-0.09014,1.27388,-0.0067,
-0.06724,1.26686,0.01577,
-0.07484,1.2602,0.00476,
-0.06332,1.25867,0.01678,
-0.08438,1.28612,0.00471,
-0.09742,1.31077,-0.00592,
-0.09808,1.298,-0.00908,
-0.07484,1.2602,0.00476,
-0.08438,1.28612,0.00471,
-0.08302,1.26481,-0.00194,
-0.08438,1.28612,0.00471,
-0.08818,1.31783,0.00459,
-0.09493,1.31596,-0.00147,
-0.07262,1.27969,-0.00539,
-0.06045,1.25965,0.00845,
-0.06792,1.26025,0.00094,
-0.08692,1.28517,-0.01328,
-0.07262,1.27969,-0.00539,
-0.08186,1.2745,-0.00958,
-0.0767,1.26597,-0.00501,
-0.07262,1.27969,-0.00539,
-0.06792,1.26025,0.00094,
-0.08692,1.28517,-0.01328,
-0.09021,1.29688,-0.01302,
-0.07262,1.27969,-0.00539,
-0.08438,1.28612,0.00471,
-0.0777,1.31122,0.01351,
-0.08818,1.31783,0.00459,
-0.08818,1.31783,0.00459,
-0.0777,1.31122,0.01351,
-0.07816,1.31599,0.00132,
-0.07262,1.27969,-0.00539,
-0.09008,1.31005,-0.01032,
-0.07882,1.29978,-0.00559,
-0.08186,1.2745,-0.00958,
-0.07262,1.27969,-0.00539,
-0.0767,1.26597,-0.00501,
-0.08438,1.28612,0.00471,
-0.09014,1.27388,-0.0067,
-0.08302,1.26481,-0.00194,
-0.08438,1.28612,0.00471,
-0.07406,1.28893,0.01498,
-0.07573,1.29881,0.01441,
-0.08438,1.28612,0.00471,
-0.07484,1.2602,0.00476,
-0.06724,1.26686,0.01577,
-0.08438,1.28612,0.00471,
-0.07177,1.28009,0.01507,
-0.07406,1.28893,0.01498,
-0.08438,1.28612,0.00471,
-0.09808,1.298,-0.00908,
-0.09604,1.28501,-0.00966,
-0.07262,1.27969,-0.00539,
-0.09021,1.29688,-0.01302,
-0.09008,1.31005,-0.01032,
-0.08438,1.28612,0.00471,
-0.09493,1.31596,-0.00147,
-0.09742,1.31077,-0.00592,
-0.07882,1.29978,-0.00559,
-0.08611,1.31463,-0.00524,
-0.07816,1.31599,0.00132,
-0.08438,1.28612,0.00471,
-0.06724,1.26686,0.01577,
-0.07177,1.28009,0.01507,
0,1.26362,0.09341,
-0.00879,1.26171,0.09069,
0,1.25884,0.09174,
-0.00931,1.32109,0.08153,
-0.01438,1.31283,0.07895,
-0.00789,1.31027,0.08043,
-0.07198,1.32098,0.05679,
-0.07308,1.334,0.0574,
-0.07426,1.32357,0.05277,
-0.06885,1.31663,0.06067,
-0.06632,1.32308,0.0629,
-0.07198,1.32098,0.05679,
-0.06632,1.32308,0.0629,
-0.07028,1.32903,0.06039,
-0.07198,1.32098,0.05679,
-0.07509,1.33874,0.05462,
-0.07054,1.35545,0.05999,
-0.07662,1.34467,0.04784,
-0.0215,1.33679,0.08078,
-0.00931,1.32109,0.08153,
-0.0113,1.33435,0.08343,
-0.03228,1.34693,0.07827,
-0.03255,1.34161,0.07752,
-0.01861,1.34091,0.08232,
-0.0215,1.33679,0.08078,
-0.0113,1.33435,0.08343,
-0.01861,1.34091,0.08232,
-0.03255,1.34161,0.07752,
-0.0331,1.33154,0.07653,
-0.02158,1.32687,0.07898,
-0.07198,1.32098,0.05679,
-0.07028,1.32903,0.06039,
-0.07308,1.334,0.0574,
-0.07426,1.32357,0.05277,
-0.07591,1.32547,0.04546,
-0.07447,1.31052,0.04458,
-0.07308,1.334,0.0574,
-0.07509,1.33874,0.05462,
-0.07426,1.32357,0.05277,
-0.03282,1.22791,0.0613,
-0.02235,1.22209,0.06108,
-0.03093,1.22799,0.06463,
-0.02235,1.22209,0.06108,
-0.02201,1.22309,0.06484,
-0.03093,1.22799,0.06463,
-0.0731,1.30975,0.0524,
-0.07198,1.32098,0.05679,
-0.07426,1.32357,0.05277,
-0.0579,1.28663,0.06556,
-0.06362,1.28358,0.06286,
-0.05984,1.28019,0.06589,
-0.0579,1.28663,0.06556,
-0.06242,1.29301,0.06351,
-0.06362,1.28358,0.06286,
-0.06242,1.29301,0.06351,
-0.06782,1.29159,0.05893,
-0.06362,1.28358,0.06286,
-0.06362,1.28358,0.06286,
-0.06782,1.29159,0.05893,
-0.06572,1.2798,0.06003,
-0.05322,1.30294,0.07019,
-0.06398,1.3,0.06301,
-0.06242,1.29301,0.06351,
-0.05322,1.30294,0.07019,
-0.06391,1.30679,0.06338,
-0.06398,1.3,0.06301,
-0.05322,1.30294,0.07019,
-0.06242,1.29301,0.06351,
-0.0579,1.28663,0.06556,
-0.0553,1.32054,0.06849,
-0.06281,1.32678,0.0652,
-0.06632,1.32308,0.0629,
-0.06239,1.31339,0.06486,
-0.05322,1.30294,0.07019,
-0.0553,1.32054,0.06849,
-0.05697,1.33119,0.0684,
-0.06281,1.32678,0.0652,
-0.0553,1.32054,0.06849,
-0.04321,1.30286,0.07431,
-0.04407,1.32169,0.07304,
-0.05322,1.30294,0.07019,
-0.04407,1.32169,0.07304,
-0.0553,1.32054,0.06849,
-0.05322,1.30294,0.07019,
-0.01861,1.34091,0.08232,
-0.0113,1.33435,0.08343,
-0.0119,1.34577,0.08461,
-0.0215,1.33679,0.08078,
-0.01861,1.34091,0.08232,
-0.03255,1.34161,0.07752,
-0.00931,1.32109,0.08153,
-0.0215,1.33679,0.08078,
-0.02158,1.32687,0.07898,
-0.03255,1.34161,0.07752,
-0.02158,1.32687,0.07898,
-0.0215,1.33679,0.08078,
-0.07447,1.31052,0.04458,
-0.0731,1.30975,0.0524,
-0.07426,1.32357,0.05277,
-0.07385,1.29993,0.03892,
-0.07238,1.30016,0.05146,
-0.07447,1.31052,0.04458,
-0.06239,1.31339,0.06486,
-0.06632,1.32308,0.0629,
-0.06885,1.31663,0.06067,
-0.06239,1.31339,0.06486,
-0.0553,1.32054,0.06849,
-0.06632,1.32308,0.0629,
-0.03204,1.23539,0.0016,
-0.04196,1.24137,0.01174,
-0.03862,1.24182,-0.00035,
-0.07591,1.32547,0.04546,
-0.07426,1.32357,0.05277,
-0.07509,1.33874,0.05462,
-0.07591,1.32547,0.04546,
-0.07509,1.33874,0.05462,
-0.07662,1.34467,0.04784,
-0.07662,1.34467,0.04784,
-0.07767,1.32552,0.03517,
-0.07591,1.32547,0.04546,
-0.07385,1.29993,0.03892,
-0.07447,1.31052,0.04458,
-0.07594,1.31054,0.03424,
-0.07767,1.32552,0.03517,
-0.0794,1.3395,0.02451,
-0.07847,1.32401,0.02407,
-0.07549,1.35591,0.04042,
-0.06907,1.36504,0.05375,
-0.06877,1.37605,0.04344,
-0.07549,1.35591,0.04042,
-0.06877,1.37605,0.04344,
-0.07521,1.36243,0.03473,
-0.01477,1.22046,0.03429,
-0.01466,1.21907,0.0448,
-0.02345,1.22239,0.04425,
-0.01477,1.22046,0.03429,
-0.00723,1.21695,0.04555,
-0.01466,1.21907,0.0448,
-0.02503,1.23017,0.00403,
-0.03478,1.23507,0.01392,
-0.03204,1.23539,0.0016,
-0.02721,1.23366,-0.00837,
-0.02503,1.23017,0.00403,
-0.03204,1.23539,0.0016,
-0.03316,1.23991,-0.01178,
-0.03862,1.24182,-0.00035,
-0.0465,1.25056,-0.00266,
-0.01477,1.22046,0.03429,
0,1.21761,0.03547,
-0.00723,1.21695,0.04555,
-0.03154,1.22797,0.03229,
-0.01477,1.22046,0.03429,
-0.02345,1.22239,0.04425,
-0.02503,1.23017,0.00403,
-0.02579,1.22936,0.01621,
-0.03478,1.23507,0.01392,
-0.03204,1.23539,0.0016,
-0.03478,1.23507,0.01392,
-0.04196,1.24137,0.01174,
-0.03862,1.24182,-0.00035,
-0.04196,1.24137,0.01174,
-0.04692,1.24657,0.00995,
-0.05342,1.25298,0.00928,
-0.06045,1.25965,0.00845,
-0.06285,1.26415,-0.00309,
-0.0465,1.25056,-0.00266,
-0.0434,1.24935,-0.0162,
-0.03316,1.23991,-0.01178,
-0.0434,1.24935,-0.0162,
-0.0465,1.25056,-0.00266,
-0.06285,1.26415,-0.00309,
-0.03316,1.23991,-0.01178,
-0.0434,1.24935,-0.0162,
-0.0228,1.23744,-0.02778,
-0.06362,1.28358,0.06286,
-0.06572,1.2798,0.06003,
-0.05984,1.28019,0.06589,
-0.07509,1.33874,0.05462,
-0.06822,1.34604,0.06187,
-0.07054,1.35545,0.05999,
-0.0591,1.37122,0.06388,
-0.06907,1.36504,0.05375,
-0.07054,1.35545,0.05999,
-0.0609,1.25604,0.01715,
-0.06318,1.25883,0.02643,
-0.06332,1.25867,0.01678,
-0.07878,1.32355,0.01812,
-0.07847,1.32401,0.02407,
-0.0794,1.3395,0.02451,
-0.0794,1.3395,0.02451,
-0.07549,1.35591,0.04042,
-0.07521,1.36243,0.03473,
-0.01271,1.22073,0.0666,
-0.02367,1.22848,0.07047,
-0.02201,1.22309,0.06484,
};
// Vertex Array Object (VAO)
// Vertex Buffer Object (VBO)
// Element Buffer Object (EBO), also called Index Buffer Object (IBO)
GLuint VBO, VAO;
glGenVertexArrays(1, &VAO);
glGenBuffers(1, &VBO);
// !!! Bind the Vertex Array Object first, then bind and set vertex buffer(s) and attribute pointer(s).
glBindVertexArray(VAO);
// Copy the vertex array into a buffer for OpenGL to use
glBindBuffer(GL_ARRAY_BUFFER, VBO);
glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW);
// Set the vertex position attribute pointer
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 3 * sizeof(GLfloat), (GLvoid*)0);
glEnableVertexAttribArray(0);
// Set the vertex TexCoord attribute pointer
//glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, 5 * sizeof(GLfloat), (GLvoid*)(3 * sizeof(GLfloat)));
//glEnableVertexAttribArray(1);
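// Note on the commented-out attribute 1 pointer above: it assumes an interleaved layout of
// 5 floats per vertex (3 position + 2 texcoord), while the vertices array here stores only
// 3 floats per vertex (matching the position stride of 3 * sizeof(GLfloat)). If texture
// coordinates were added, the data and the position stride would both have to change to 5 floats.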
glBindVertexArray(0);
// Render in wireframe mode
glPolygonMode(GL_FRONT_AND_BACK, GL_LINE);
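// (Filled rendering can be restored with glPolygonMode(GL_FRONT_AND_BACK, GL_FILL).)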
// Load and create the texture
GLuint texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture); // All subsequent GL_TEXTURE_2D operations now apply to this texture object
// Set the texture wrapping mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
// Set the texture filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
// Render loop
while (!glfwWindowShouldClose(window))
{
// Calculate deltatime of current frame
GLfloat currentFrame = glfwGetTime();
deltaTime = currentFrame - lastFrame;
lastFrame = currentFrame;
// Check and call events
glfwPollEvents();
do_movement();
// Render commands
glClearColor(0.2f, 0.3f, 0.3f, 1.0f);
// glClear(GL_COLOR_BUFFER_BIT);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// Bind the texture
glBindTexture(GL_TEXTURE_2D, texture);
// Draw
ourShader.Use();
glBindVertexArray(VAO);
// Set the camera parameters
//glm::mat4 model;
glm::mat4 view;
glm::mat4 projection;
view = camera.GetViewMatrix();
projection = glm::perspective(camera.Zoom, (GLfloat)mWidth / (GLfloat)mHeight, 0.1f, 1000.0f);
// Get their uniform location
GLint modelLoc = glGetUniformLocation(ourShader.Program, "model");
GLint viewLoc = glGetUniformLocation(ourShader.Program, "view");
GLint projLoc = glGetUniformLocation(ourShader.Program, "projection");
// Pass them to the shaders
glm::mat4 model;
model = glm::translate(model, glm::vec3(0.0f, -13.0f, 0.0f));
//model = glm::rotate(model, (GLfloat)glfwGetTime() * 1.0f, glm::vec3(0.5f, 1.0f, 0.0f));
model = glm::scale(model, glm::vec3(10.0f, 10.0f, 10.0f));
glUniformMatrix4fv(modelLoc, 1, GL_FALSE, glm::value_ptr(model));
glUniformMatrix4fv(viewLoc, 1, GL_FALSE, glm::value_ptr(view));
// Note: currently we set the projection matrix each frame, but since the projection matrix rarely changes it's often best practice to set it outside the main loop only once.
glUniformMatrix4fv(projLoc, 1, GL_FALSE, glm::value_ptr(projection));
// Upload all three matrices before issuing the draw call so even the first frame renders with valid uniforms.
glDrawArrays(GL_TRIANGLES, 0, 9108);
glBindVertexArray(0);
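// A possible refactor following the note above (a sketch, assuming the window size is fixed):
// upload the projection matrix once before the render loop,
//   ourShader.Use();
//   glUniformMatrix4fv(glGetUniformLocation(ourShader.Program, "projection"),
//                      1, GL_FALSE, glm::value_ptr(projection));
// and re-upload it only when camera.Zoom (changed by the scroll callback) or the framebuffer size changes.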
// Swap the buffers
glfwSwapBuffers(window);
}
// Release the VAO and VBO
glDeleteVertexArrays(1, &VAO);
glDeleteBuffers(1, &VBO);
// Release the memory allocated by GLFW
glfwTerminate();
return 0;
}
void key_callback(GLFWwindow* window, int key, int scancode, int action, int mode)
{
// When the user presses the escape key, set the window's WindowShouldClose property to true,
// closing the application
if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
glfwSetWindowShouldClose(window, GL_TRUE);
if (key >= 0 && key < 1024)
{
if (action == GLFW_PRESS)
keys[key] = true;
else if (action == GLFW_RELEASE)
keys[key] = false;
}
}
void do_movement()
{
// Camera control
GLfloat cameraSpeed = 5.0f * deltaTime;
if (keys[GLFW_KEY_W] || keys[GLFW_KEY_UP])
camera.ProcessKeyboard(FORWARD, deltaTime);
if (keys[GLFW_KEY_S] || keys[GLFW_KEY_DOWN])
camera.ProcessKeyboard(BACKWARD, deltaTime);
if (keys[GLFW_KEY_A] || keys[GLFW_KEY_LEFT])
camera.ProcessKeyboard(LEFT, deltaTime);
if (keys[GLFW_KEY_D] || keys[GLFW_KEY_RIGHT])
camera.ProcessKeyboard(RIGHT, deltaTime);
}
void mouse_callback(GLFWwindow* window, double xpos, double ypos)
{
if (firstMouse)
{
lastX = xpos;
lastY = ypos;
firstMouse = false;
}
GLfloat xoffset = xpos - lastX;
GLfloat yoffset = lastY - ypos; // Reversed, since window y-coordinates increase from bottom to top
lastX = xpos;
lastY = ypos;
camera.ProcessMouseMovement(xoffset, yoffset);
}
void scroll_callback(GLFWwindow* window, double xoffset, double yoffset)
{
camera.ProcessMouseScroll(yoffset);
}<|fim▁end|>
| |
<|file_name|>iam.rs<|end_file_name|><|fim▁begin|>//! Amazon Identity and Access Management
<|fim▁hole|><|fim▁end|>
|
#![cfg_attr(feature = "nightly-testing", allow(while_let_loop))]
include!(concat!(env!("OUT_DIR"), "/iam.rs"));
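// The actual service bindings live in a file emitted into $OUT_DIR (presumably generated by
// the crate's build script) and are pulled into this module at compile time by the include! above.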
|
<|file_name|>statement.ts<|end_file_name|><|fim▁begin|>import { Month } from '../core/month';
import { Balance } from '../core/balance';
// Abstraction for a financial statement for a period of time.
export abstract class Statement {
name: string;
// Period of time for the statement
month: Month;
// Recorded start balance for the statement.
startBalance?: Balance;
// Recorded end balance for the statement.
endBalance?: Balance;
// Total transaction inflows.
inFlows = 0;
// Total transaction outflows.
outFlows = 0;
// Amount transfered to other accounts by same owner.
totalTransfers = 0;
// Amount transfered to external entities.
totalPayments = 0;
// Amount transfered from external entities.
income = 0;
constructor(name: string, month: Month) {
this.name = name;
this.month = month;
}
get addSub(): number {
return this.inFlows + this.outFlows;
}
get change(): number | undefined {
const startAmount = this.startBalance?.amount;
if (startAmount === undefined) return undefined;
const endAmount = this.endBalance?.amount;
if (endAmount === undefined) return undefined;
return endAmount - startAmount;
}
get percentChange(): number | undefined {
const startAmount = this.startBalance?.amount;
const change = this.change;
return change && startAmount && (100 * change) / startAmount;
}
get unaccounted(): number | undefined {
const change = this.change;
return change !== undefined ? change - this.addSub : undefined;
}
abstract get isClosed(): boolean;
addInFlow(inFlow: number): void {
if (inFlow > 0) {
this.inFlows += inFlow;
} else {
this.outFlows += inFlow;
}
}
addOutFlow(outFlow: number): void {
if (outFlow > 0) {
this.inFlows += outFlow;
} else {
this.outFlows += outFlow;<|fim▁hole|>}<|fim▁end|>
|
}
}
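
// A minimal concrete subclass sketch (the `closed` flag and its semantics are assumed here
// purely for illustration; they are not part of the surrounding code base):
class MonthlyStatement extends Statement {
  constructor(name: string, month: Month, private closed = false) {
    super(name, month);
  }

  get isClosed(): boolean {
    return this.closed;
  }
}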
|
<|file_name|>c_win32.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! C definitions used by libnative that don't belong in liblibc
#![allow(type_overflow)]
use libc;
pub static WSADESCRIPTION_LEN: uint = 256;
pub static WSASYS_STATUS_LEN: uint = 128;
pub static FIONBIO: libc::c_long = 0x8004667e;
static FD_SETSIZE: uint = 64;
pub static MSG_DONTWAIT: libc::c_int = 0;
#[repr(C)]
pub struct WSADATA {
pub wVersion: libc::WORD,
pub wHighVersion: libc::WORD,
pub szDescription: [u8, ..WSADESCRIPTION_LEN + 1],
pub szSystemStatus: [u8, ..WSASYS_STATUS_LEN + 1],
pub iMaxSockets: u16,
pub iMaxUdpDg: u16,
pub lpVendorInfo: *mut u8,
}
pub type LPWSADATA = *mut WSADATA;
#[repr(C)]
pub struct fd_set {
fd_count: libc::c_uint,
fd_array: [libc::SOCKET, ..FD_SETSIZE],
}
pub fn fd_set(set: &mut fd_set, s: libc::SOCKET) {
set.fd_array[set.fd_count as uint] = s;
set.fd_count += 1;
}
#[link(name = "ws2_32")]
extern "system" {
pub fn WSAStartup(wVersionRequested: libc::WORD,
lpWSAData: LPWSADATA) -> libc::c_int;
pub fn WSAGetLastError() -> libc::c_int;
pub fn ioctlsocket(s: libc::SOCKET, cmd: libc::c_long,
argp: *mut libc::c_ulong) -> libc::c_int;
pub fn select(nfds: libc::c_int,
readfds: *mut fd_set,
writefds: *mut fd_set,
exceptfds: *mut fd_set,
timeout: *mut libc::timeval) -> libc::c_int;
pub fn getsockopt(sockfd: libc::SOCKET,
level: libc::c_int,
optname: libc::c_int,
optval: *mut libc::c_char,
optlen: *mut libc::c_int) -> libc::c_int;
pub fn CancelIo(hFile: libc::HANDLE) -> libc::BOOL;
pub fn CancelIoEx(hFile: libc::HANDLE,
lpOverlapped: libc::LPOVERLAPPED) -> libc::BOOL;
}
pub mod compat {
use std::intrinsics::{atomic_store_relaxed, transmute};
use std::iter::Iterator;
use libc::types::os::arch::extra::{LPCWSTR, HMODULE, LPCSTR, LPVOID};
extern "system" {
fn GetModuleHandleW(lpModuleName: LPCWSTR) -> HMODULE;
fn GetProcAddress(hModule: HMODULE, lpProcName: LPCSTR) -> LPVOID;
}
// store_func() is idempotent, so using relaxed ordering for the atomics
// should be enough. This way, calling a function in this compatibility
// layer (after it's loaded) shouldn't be any slower than a regular DLL
// call.
unsafe fn store_func(ptr: *mut uint, module: &str, symbol: &str, fallback: uint) {
let module: Vec<u16> = module.utf16_units().collect();
let module = module.append_one(0);
symbol.with_c_str(|symbol| {
let handle = GetModuleHandleW(module.as_ptr());
let func: uint = transmute(GetProcAddress(handle, symbol));
atomic_store_relaxed(ptr, if func == 0 {
fallback
} else {
func
})
})
}<|fim▁hole|> /// # Example
/// ```
/// compat_fn!(adll32::SomeFunctionW(_arg: LPCWSTR) {
/// // Fallback implementation
/// })
/// ```
///
/// Note that arguments unused by the fallback implementation should not be called `_` as
/// they are used to be passed to the real function if available.
macro_rules! compat_fn(
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*)
-> $rettype:ty $fallback:block) => (
#[inline(always)]
pub unsafe fn $symbol($($argname: $argtype),*) -> $rettype {
static mut ptr: extern "system" fn($($argname: $argtype),*) -> $rettype = thunk;
extern "system" fn thunk($($argname: $argtype),*) -> $rettype {
unsafe {
::io::c::compat::store_func(&mut ptr as *mut _ as *mut uint,
stringify!($module),
stringify!($symbol),
fallback as uint);
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
}
extern "system" fn fallback($($argname: $argtype),*) -> $rettype $fallback
::std::intrinsics::atomic_load_relaxed(&ptr)($($argname),*)
}
);
($module:ident::$symbol:ident($($argname:ident: $argtype:ty),*) $fallback:block) => (
compat_fn!($module::$symbol($($argname: $argtype),*) -> () $fallback)
)
)
/// Compatibility layer for functions in `kernel32.dll`
///
/// Latest versions of Windows this is needed for:
///
/// * `CreateSymbolicLinkW`: Windows XP, Windows Server 2003
/// * `GetFinalPathNameByHandleW`: Windows XP, Windows Server 2003
pub mod kernel32 {
use libc::types::os::arch::extra::{DWORD, LPCWSTR, BOOLEAN, HANDLE};
use libc::consts::os::extra::ERROR_CALL_NOT_IMPLEMENTED;
extern "system" {
fn SetLastError(dwErrCode: DWORD);
}
compat_fn!(kernel32::CreateSymbolicLinkW(_lpSymlinkFileName: LPCWSTR,
_lpTargetFileName: LPCWSTR,
_dwFlags: DWORD) -> BOOLEAN {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
compat_fn!(kernel32::GetFinalPathNameByHandleW(_hFile: HANDLE,
_lpszFilePath: LPCWSTR,
_cchFilePath: DWORD,
_dwFlags: DWORD) -> DWORD {
unsafe { SetLastError(ERROR_CALL_NOT_IMPLEMENTED as DWORD); }
0
})
}
}<|fim▁end|>
|
/// Macro for creating a compatibility fallback for a Windows function
///
|
<|file_name|>test_dave_reader.py<|end_file_name|><|fim▁begin|>from test.fixture import *
from hypothesis import given
from hypothesis.strategies import text
from astropy.io import fits
import utils.dave_reader as DaveReader
from utils.dave_reader import save_to_intermediate_file, load_dataset_from_intermediate_file
import utils.file_utils as FileUtils
from stingray.events import EventList
from stingray import Lightcurve, Powerspectrum, AveragedCrossspectrum
from hendrics.io import HEN_FILE_EXTENSION
import numpy as np
class TestStingrayTypes():
@classmethod
def setup_class(cls):
cls.dum = 'bubu' + HEN_FILE_EXTENSION
def test_load_and_save_events(self):
events = EventList([0, 2, 3.], pi=[1, 2, 3], mjdref=54385.3254923845,
gti = np.longdouble([[-0.5, 3.5]]))
events.energy = np.array([3., 4., 5.])
save_to_intermediate_file(events, self.dum)
ds = load_dataset_from_intermediate_file(self.dum)
assert ds
def test_load_and_save_lcurve(self):
lcurve = Lightcurve(np.linspace(0, 10, 15), np.random.poisson(30, 15),
mjdref=54385.3254923845,
gti = np.longdouble([[-0.5, 3.5]]))
save_to_intermediate_file(lcurve, self.dum)
ds = load_dataset_from_intermediate_file(self.dum)
assert ds
@given(text())
def test_get_txt_dataset(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_1.txt")
table_id = "EVENTS"
header_names = ["TIME", "PHA", "Color1", "Color2"]
dataset = DaveReader.get_txt_dataset(destination, table_id, header_names)
num_rows = 10
assert dataset
assert len(dataset.tables) == 2
assert table_id in dataset.tables
table = dataset.tables[table_id]
assert len(table.columns) == len(header_names)
assert len(table.columns[header_names[0]].values) == num_rows
<|fim▁hole|> destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
ds_id = "fits_table"
table_ids = ["Primary", "RATE", "STDGTI"]
hdulist = fits.open(destination)
dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
assert dataset
assert len(dataset.tables) == 2
assert table_ids[1] in dataset.tables
assert len(dataset.tables[table_ids[1]].columns) == 4
@given(text())
def test_get_fits_table_column_names(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
# Opening Fits
hdulist = fits.open(destination)
column_names = DaveReader.get_fits_table_column_names(hdulist, "EVENTS")
assert len(column_names) == 2
@given(text())
def test_get_fits_dataset_evt(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
ds_id = "fits_table"
table_ids = ["Primary", "EVENTS", "GTI"]
hdulist = fits.open(destination)
dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
assert dataset
assert len(dataset.tables) == 2
assert table_ids[1] in dataset.tables
assert len(dataset.tables[table_ids[1]].columns) == 2
@given(text())
def test_get_events_fits_dataset_with_stingray(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "test.evt")
ds_id = "fits_table"
table_ids = ["Primary", "EVENTS", "GTI"]
# Opening Fits
hdulist = fits.open(destination)
dataset = DaveReader.get_events_fits_dataset_with_stingray(destination, hdulist)
assert dataset
assert len(dataset.tables) == 2
assert table_ids[1] in dataset.tables
assert len(dataset.tables[table_ids[1]].columns) == 2
@given(text())
def test_get_lightcurve_fits_dataset_with_stingray(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "PN_source_lightcurve_raw.lc")
# Opening Fits
hdulist = fits.open(destination)
dataset = DaveReader.get_lightcurve_fits_dataset_with_stingray(destination, hdulist, hduname='RATE',
column='TIME', gtistring='GTI,STDGTI')
assert dataset
@given(text())
def test_get_file_dataset(s):
destination = FileUtils.get_destination(TEST_RESOURCES, "Test_Input_2.lc")
ds_id = "fits_table"
table_ids = ["Primary", "RATE", "STDGTI"]
hdulist = fits.open(destination)
dataset = DaveReader.get_fits_dataset(hdulist, ds_id, table_ids)
assert dataset
assert len(dataset.tables) == 2
assert table_ids[1] in dataset.tables<|fim▁end|>
|
@given(text())
def test_get_fits_dataset_lc(s):
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from django.contrib import messages
from django.core.mail import EmailMultiAlternatives
from django.conf import settings
from .models import Email
from users.models import User
@admin.register(Email)
class EmailAdmin(admin.ModelAdmin):
list_display = ('subject', 'sent', 'created', 'modified', 'get_approvers', 'is_sendable')
list_filter = ('sent', 'created', 'modified')
search_fields = ('subject', 'content', )
readonly_fields = ('sent', 'approvers', 'markdown_content')
actions = ['approve', "send_email", "send_test_email"]
def get_approvers(self, obj):
return ", ".join([u.username for u in obj.approvers.all()])
get_approvers.short_description = "Approbateurs"
def is_sendable(self, obj):
return obj.approvers.count() >= settings.MINIMAL_MAIL_APPROVERS and not obj.sent
is_sendable.short_description = "Est envoyable"
def approve(self, request, queryset):
if not queryset.count() == 1:
self.message_user(request, message="Vous ne devez séléctionner qu'un email à approuver", level=messages.ERROR)
return
email = queryset.first()
email.approvers.add(request.user)
self.message_user(request, "L'email a été approuvé.")
approve.short_description = "Approuver cet email"
def send_email(self, request, queryset):
if not queryset.count() == 1:
self.message_user(request, message="Vous ne devez séléctionner qu'un email à envoyer", level=messages.ERROR)
return
<|fim▁hole|>
if email.sent:
self.message_user(request, message="Cet email a déjà été envoyé", level=messages.ERROR)
return
if email.approvers.count() < settings.MINIMAL_MAIL_APPROVERS:
self.message_user(request, message="Ce message n'a pas assez d'approbateurs", level=messages.ERROR)
return
recipients = [u.email for u in User.objects.filter(newsletter=True)]
message = EmailMultiAlternatives(
subject=email.subject,
body=email.content,
from_email='Newsletter UrLab <[email protected]>',
to=["UrLab <[email protected]>"],
bcc=recipients,
)
message.attach_alternative(email.markdown_content(), "text/html")
message.send()
email.sent = True
email.save()
self.message_user(request, "L'email a été énvoyé.")
send_email.short_description = "Envoyer cet email A TOUT LE MONDE"
def send_test_email(self, request, queryset):
if not queryset.count() == 1:
self.message_user(request, message="Vous ne devez séléctionner qu'un email à envoyer", level=messages.ERROR)
return
email = queryset.first()
if email.sent:
self.message_user(request, message="Cet email a déjà été envoyé", level=messages.ERROR)
return
message = EmailMultiAlternatives(
subject=email.subject,
body=email.content,
from_email='Newsletter UrLab <[email protected]>',
to=["[email protected]"],
bcc=[request.user.email],
)
message.attach_alternative(email.markdown_content(), "text/html")
message.send()
self.message_user(request, "L'email a été énvoyé à votre adresse")
send_test_email.short_description = "Envoyer cet email A MOI UNIQUEMENT"<|fim▁end|>
|
email = queryset.first()
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|>from south.db import db
from django.db import models
from transifex.releases.models import *
class Migration:
def forwards(self, orm):<|fim▁hole|> "Write your backwards migration here"
models = {
}
complete_apps = ['releases']<|fim▁end|>
|
"Write your forwards migration here"
def backwards(self, orm):
|
<|file_name|>uvio.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use option::*;
use result::*;
use super::uv::*;
use super::rtio::*;
use ops::Drop;
use cell::{Cell, empty_cell};
use cast::transmute;
use super::sched::Scheduler;
#[cfg(test)] use super::sched::Task;
#[cfg(test)] use unstable::run_in_bare_thread;
#[cfg(test)] use uint;
pub struct UvEventLoop {
uvio: UvIoFactory
}
pub impl UvEventLoop {
fn new() -> UvEventLoop {
UvEventLoop {
uvio: UvIoFactory(Loop::new())
}
}
/// A convenience constructor
fn new_scheduler() -> Scheduler {
Scheduler::new(~UvEventLoop::new())
}
}
impl Drop for UvEventLoop {
fn finalize(&self) {
// XXX: Need mutable finalizer
let self = unsafe {
transmute::<&UvEventLoop, &mut UvEventLoop>(self)
};
let mut uv_loop = self.uvio.uv_loop();
uv_loop.close();
}
}
impl EventLoop for UvEventLoop {
fn run(&mut self) {
self.uvio.uv_loop().run();
}
fn callback(&mut self, f: ~fn()) {
let mut idle_watcher = IdleWatcher::new(self.uvio.uv_loop());
do idle_watcher.start |idle_watcher, status| {
assert!(status.is_none());
let mut idle_watcher = idle_watcher;
idle_watcher.stop();
idle_watcher.close();
f();
}
}
fn io(&mut self) -> Option<&'self mut IoFactoryObject> {
Some(&mut self.uvio)
}
}
#[test]<|fim▁hole|> let mut count = 0;
let count_ptr: *mut int = &mut count;
do event_loop.callback {
unsafe { *count_ptr += 1 }
}
event_loop.run();
assert!(count == 1);
}
}
pub struct UvIoFactory(Loop);
pub impl UvIoFactory {
fn uv_loop(&mut self) -> &'self mut Loop {
match self { &UvIoFactory(ref mut ptr) => ptr }
}
}
impl IoFactory for UvIoFactory {
// Connect to an address and return a new stream
// NB: This blocks the task waiting on the connection.
// It would probably be better to return a future
fn connect(&mut self, addr: IpAddr) -> Option<~StreamObject> {
// Create a cell in the task to hold the result. We will fill
// the cell before resuming the task.
let result_cell = empty_cell();
let result_cell_ptr: *Cell<Option<~StreamObject>> = &result_cell;
do Scheduler::local |scheduler| {
assert!(scheduler.in_task_context());
// Block this task and take ownership, switch to scheduler context
do scheduler.block_running_task_and_then |scheduler, task| {
rtdebug!("connect: entered scheduler context");
assert!(!scheduler.in_task_context());
let mut tcp_watcher = TcpWatcher::new(self.uv_loop());
let task_cell = Cell(task);
// Wait for a connection
do tcp_watcher.connect(addr) |stream_watcher, status| {
rtdebug!("connect: in connect callback");
let maybe_stream = if status.is_none() {
rtdebug!("status is none");
Some(~UvStream(stream_watcher))
} else {
rtdebug!("status is some");
stream_watcher.close(||());
None
};
// Store the stream in the task's stack
unsafe { (*result_cell_ptr).put_back(maybe_stream); }
// Context switch
do Scheduler::local |scheduler| {
scheduler.resume_task_immediately(task_cell.take());
}
}
}
}
assert!(!result_cell.is_empty());
return result_cell.take();
}
fn bind(&mut self, addr: IpAddr) -> Option<~TcpListenerObject> {
let mut watcher = TcpWatcher::new(self.uv_loop());
watcher.bind(addr);
return Some(~UvTcpListener(watcher));
}
}
pub struct UvTcpListener(TcpWatcher);
impl UvTcpListener {
fn watcher(&self) -> TcpWatcher {
match self { &UvTcpListener(w) => w }
}
fn close(&self) {
// XXX: Need to wait until close finishes before returning
self.watcher().as_stream().close(||());
}
}
impl Drop for UvTcpListener {
fn finalize(&self) {
// XXX: Again, this never gets called. Use .close() instead
//self.watcher().as_stream().close(||());
}
}
impl TcpListener for UvTcpListener {
fn listen(&mut self) -> Option<~StreamObject> {
rtdebug!("entering listen");
let result_cell = empty_cell();
let result_cell_ptr: *Cell<Option<~StreamObject>> = &result_cell;
let server_tcp_watcher = self.watcher();
do Scheduler::local |scheduler| {
assert!(scheduler.in_task_context());
do scheduler.block_running_task_and_then |_, task| {
let task_cell = Cell(task);
let mut server_tcp_watcher = server_tcp_watcher;
do server_tcp_watcher.listen |server_stream_watcher, status| {
let maybe_stream = if status.is_none() {
let mut server_stream_watcher = server_stream_watcher;
let mut loop_ = loop_from_watcher(&server_stream_watcher);
let mut client_tcp_watcher = TcpWatcher::new(&mut loop_);
let mut client_tcp_watcher = client_tcp_watcher.as_stream();
// XXX: Need's to be surfaced in interface
server_stream_watcher.accept(client_tcp_watcher);
Some(~UvStream::new(client_tcp_watcher))
} else {
None
};
unsafe { (*result_cell_ptr).put_back(maybe_stream); }
rtdebug!("resuming task from listen");
// Context switch
do Scheduler::local |scheduler| {
scheduler.resume_task_immediately(task_cell.take());
}
}
}
}
assert!(!result_cell.is_empty());
return result_cell.take();
}
}
pub struct UvStream(StreamWatcher);
impl UvStream {
fn new(watcher: StreamWatcher) -> UvStream {
UvStream(watcher)
}
fn watcher(&self) -> StreamWatcher {
match self { &UvStream(w) => w }
}
// XXX: finalize isn't working for ~UvStream???
fn close(&self) {
// XXX: Need to wait until this finishes before returning
self.watcher().close(||());
}
}
impl Drop for UvStream {
fn finalize(&self) {
rtdebug!("closing stream");
//self.watcher().close(||());
}
}
impl Stream for UvStream {
fn read(&mut self, buf: &mut [u8]) -> Result<uint, ()> {
let result_cell = empty_cell();
let result_cell_ptr: *Cell<Result<uint, ()>> = &result_cell;
do Scheduler::local |scheduler| {
assert!(scheduler.in_task_context());
let watcher = self.watcher();
let buf_ptr: *&mut [u8] = &buf;
do scheduler.block_running_task_and_then |scheduler, task| {
rtdebug!("read: entered scheduler context");
assert!(!scheduler.in_task_context());
let mut watcher = watcher;
let task_cell = Cell(task);
// XXX: We shouldn't reallocate these callbacks every
// call to read
let alloc: AllocCallback = |_| unsafe {
slice_to_uv_buf(*buf_ptr)
};
do watcher.read_start(alloc) |watcher, nread, _buf, status| {
// Stop reading so that no read callbacks are
// triggered before the user calls `read` again.
// XXX: Is there a performance impact to calling
// stop here?
let mut watcher = watcher;
watcher.read_stop();
let result = if status.is_none() {
assert!(nread >= 0);
Ok(nread as uint)
} else {
Err(())
};
unsafe { (*result_cell_ptr).put_back(result); }
do Scheduler::local |scheduler| {
scheduler.resume_task_immediately(task_cell.take());
}
}
}
}
assert!(!result_cell.is_empty());
return result_cell.take();
}
fn write(&mut self, buf: &[u8]) -> Result<(), ()> {
let result_cell = empty_cell();
let result_cell_ptr: *Cell<Result<(), ()>> = &result_cell;
do Scheduler::local |scheduler| {
assert!(scheduler.in_task_context());
let watcher = self.watcher();
let buf_ptr: *&[u8] = &buf;
do scheduler.block_running_task_and_then |_, task| {
let mut watcher = watcher;
let task_cell = Cell(task);
let buf = unsafe { &*buf_ptr };
// XXX: OMGCOPIES
let buf = buf.to_vec();
do watcher.write(buf) |_watcher, status| {
let result = if status.is_none() {
Ok(())
} else {
Err(())
};
unsafe { (*result_cell_ptr).put_back(result); }
do Scheduler::local |scheduler| {
scheduler.resume_task_immediately(task_cell.take());
}
}
}
}
assert!(!result_cell.is_empty());
return result_cell.take();
}
}
#[test]
#[ignore(reason = "ffi struct issues")]
fn test_simple_io_no_connect() {
do run_in_bare_thread {
let mut sched = ~UvEventLoop::new_scheduler();
let task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let addr = Ipv4(127, 0, 0, 1, 2926);
let maybe_chan = io.connect(addr);
assert!(maybe_chan.is_none());
}
};
sched.task_queue.push_back(task);
sched.run();
}
}
#[test]
#[ignore(reason = "ffi struct issues")]
fn test_simple_tcp_server_and_client() {
do run_in_bare_thread {
let mut sched = ~UvEventLoop::new_scheduler();
let addr = Ipv4(127, 0, 0, 1, 2929);
let client_task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let mut stream = io.connect(addr).unwrap();
stream.write([0, 1, 2, 3, 4, 5, 6, 7]);
stream.close();
}
};
let server_task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let mut listener = io.bind(addr).unwrap();
let mut stream = listener.listen().unwrap();
let mut buf = [0, .. 2048];
let nread = stream.read(buf).unwrap();
assert!(nread == 8);
for uint::range(0, nread) |i| {
rtdebug!("%u", buf[i] as uint);
assert!(buf[i] == i as u8);
}
stream.close();
listener.close();
}
};
// Start the server first so it listens before the client connects
sched.task_queue.push_back(server_task);
sched.task_queue.push_back(client_task);
sched.run();
}
}
#[test] #[ignore(reason = "busted")]
fn test_read_and_block() {
do run_in_bare_thread {
let mut sched = ~UvEventLoop::new_scheduler();
let addr = Ipv4(127, 0, 0, 1, 2930);
let client_task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let mut stream = io.connect(addr).unwrap();
stream.write([0, 1, 2, 3, 4, 5, 6, 7]);
stream.write([0, 1, 2, 3, 4, 5, 6, 7]);
stream.write([0, 1, 2, 3, 4, 5, 6, 7]);
stream.write([0, 1, 2, 3, 4, 5, 6, 7]);
stream.close();
}
};
let server_task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let mut listener = io.bind(addr).unwrap();
let mut stream = listener.listen().unwrap();
let mut buf = [0, .. 2048];
let expected = 32;
let mut current = 0;
let mut reads = 0;
while current < expected {
let nread = stream.read(buf).unwrap();
for uint::range(0, nread) |i| {
let val = buf[i] as uint;
assert!(val == current % 8);
current += 1;
}
reads += 1;
do Scheduler::local |scheduler| {
// Yield to the other task in hopes that it
// will trigger a read callback while we are
// not ready for it
do scheduler.block_running_task_and_then |scheduler, task| {
scheduler.task_queue.push_back(task);
}
}
}
// Make sure we had multiple reads
assert!(reads > 1);
stream.close();
listener.close();
}
};
// Start the server first so it listens before the client connects
sched.task_queue.push_back(server_task);
sched.task_queue.push_back(client_task);
sched.run();
}
}
#[test] #[ignore(reason = "needs server")]
fn test_read_read_read() {
do run_in_bare_thread {
let mut sched = ~UvEventLoop::new_scheduler();
let addr = Ipv4(127, 0, 0, 1, 2931);
let client_task = ~do Task::new(&mut sched.stack_pool) {
do Scheduler::local |sched| {
let io = sched.event_loop.io().unwrap();
let mut stream = io.connect(addr).unwrap();
let mut buf = [0, .. 2048];
let mut total_bytes_read = 0;
while total_bytes_read < 500000000 {
let nread = stream.read(buf).unwrap();
rtdebug!("read %u bytes", nread as uint);
total_bytes_read += nread;
}
rtdebug_!("read %u bytes total", total_bytes_read as uint);
stream.close();
}
};
sched.task_queue.push_back(client_task);
sched.run();
}
}<|fim▁end|>
|
fn test_callback_run_once() {
do run_in_bare_thread {
let mut event_loop = UvEventLoop::new();
|
<|file_name|>common.py<|end_file_name|><|fim▁begin|>"""Common code for Withings."""
import asyncio
from dataclasses import dataclass
import datetime
from datetime import timedelta
from enum import Enum, IntEnum
import logging
import re
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
from aiohttp.web import Response
import requests
from withings_api import AbstractWithingsApi
from withings_api.common import (
AuthFailedException,
GetSleepSummaryField,
MeasureGroupAttribs,
MeasureType,
MeasureTypes,
NotifyAppli,
SleepGetSummaryResponse,
UnauthorizedException,
query_measure_groups,
)
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.http import HomeAssistantView
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_WEBHOOK_ID,
HTTP_UNAUTHORIZED,
MASS_KILOGRAMS,
PERCENTAGE,
SPEED_METERS_PER_SECOND,
TIME_SECONDS,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.helpers.config_entry_oauth2_flow import (
AUTH_CALLBACK_PATH,
AbstractOAuth2Implementation,
LocalOAuth2Implementation,
OAuth2Session,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_registry import EntityRegistry
from homeassistant.helpers.network import get_url
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt
from . import const
from .const import Measurement
_LOGGER = logging.getLogger(const.LOG_NAMESPACE)
NOT_AUTHENTICATED_ERROR = re.compile(
f"^{HTTP_UNAUTHORIZED},.*",
re.IGNORECASE,
)
DATA_UPDATED_SIGNAL = "withings_entity_state_updated"
MeasurementData = Dict[Measurement, Any]
class NotAuthenticatedError(HomeAssistantError):
"""Raise when not authenticated with the service."""
class ServiceError(HomeAssistantError):
"""Raise when the service has an error."""
class UpdateType(Enum):
"""Data update type."""
POLL = "poll"
WEBHOOK = "webhook"
@dataclass
class WithingsAttribute:
"""Immutable class for describing withings sensor data."""
measurement: Measurement
measute_type: Enum
friendly_name: str
unit_of_measurement: str
icon: Optional[str]
platform: str
enabled_by_default: bool
update_type: UpdateType
@dataclass
class WithingsData:
"""Represents value and meta-data from the withings service."""
attribute: WithingsAttribute
value: Any
@dataclass
class WebhookConfig:
"""Config for a webhook."""
id: str
url: str
enabled: bool
@dataclass
class StateData:
"""State data held by data manager for retrieval by entities."""
unique_id: str
state: Any
WITHINGS_ATTRIBUTES = [
WithingsAttribute(
Measurement.WEIGHT_KG,
MeasureType.WEIGHT,
"Weight",
MASS_KILOGRAMS,
"mdi:weight-kilogram",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.FAT_MASS_KG,
MeasureType.FAT_MASS_WEIGHT,
"Fat Mass",
MASS_KILOGRAMS,
"mdi:weight-kilogram",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.FAT_FREE_MASS_KG,
MeasureType.FAT_FREE_MASS,
"Fat Free Mass",
MASS_KILOGRAMS,
"mdi:weight-kilogram",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.MUSCLE_MASS_KG,
MeasureType.MUSCLE_MASS,
"Muscle Mass",
MASS_KILOGRAMS,
"mdi:weight-kilogram",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.BONE_MASS_KG,
MeasureType.BONE_MASS,
"Bone Mass",
MASS_KILOGRAMS,
"mdi:weight-kilogram",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.HEIGHT_M,
MeasureType.HEIGHT,
"Height",
const.UOM_LENGTH_M,
"mdi:ruler",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.TEMP_C,
MeasureType.TEMPERATURE,
"Temperature",
const.UOM_TEMP_C,
"mdi:thermometer",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.BODY_TEMP_C,
MeasureType.BODY_TEMPERATURE,
"Body Temperature",
const.UOM_TEMP_C,
"mdi:thermometer",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SKIN_TEMP_C,
MeasureType.SKIN_TEMPERATURE,
"Skin Temperature",
const.UOM_TEMP_C,
"mdi:thermometer",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.FAT_RATIO_PCT,
MeasureType.FAT_RATIO,
"Fat Ratio",
PERCENTAGE,
None,
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.DIASTOLIC_MMHG,
MeasureType.DIASTOLIC_BLOOD_PRESSURE,
"Diastolic Blood Pressure",
const.UOM_MMHG,
None,
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SYSTOLIC_MMGH,
MeasureType.SYSTOLIC_BLOOD_PRESSURE,
"Systolic Blood Pressure",
const.UOM_MMHG,
None,
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.HEART_PULSE_BPM,
MeasureType.HEART_RATE,
"Heart Pulse",
const.UOM_BEATS_PER_MINUTE,
"mdi:heart-pulse",
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SPO2_PCT,
MeasureType.SP02,
"SP02",
PERCENTAGE,
None,
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.HYDRATION,
MeasureType.HYDRATION,
"Hydration",
MASS_KILOGRAMS,
"mdi:water",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.PWV,
MeasureType.PULSE_WAVE_VELOCITY,
"Pulse Wave Velocity",
SPEED_METERS_PER_SECOND,
None,
SENSOR_DOMAIN,
True,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_BREATHING_DISTURBANCES_INTENSITY,
GetSleepSummaryField.BREATHING_DISTURBANCES_INTENSITY,
"Breathing disturbances intensity",
"",
"",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_DEEP_DURATION_SECONDS,
GetSleepSummaryField.DEEP_SLEEP_DURATION,
"Deep sleep",
TIME_SECONDS,
"mdi:sleep",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_TOSLEEP_DURATION_SECONDS,
GetSleepSummaryField.DURATION_TO_SLEEP,
"Time to sleep",
TIME_SECONDS,
"mdi:sleep",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_TOWAKEUP_DURATION_SECONDS,
GetSleepSummaryField.DURATION_TO_WAKEUP,
"Time to wakeup",
TIME_SECONDS,
"mdi:sleep-off",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_HEART_RATE_AVERAGE,
GetSleepSummaryField.HR_AVERAGE,
"Average heart rate",
const.UOM_BEATS_PER_MINUTE,
"mdi:heart-pulse",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_HEART_RATE_MAX,
GetSleepSummaryField.HR_MAX,
"Maximum heart rate",
const.UOM_BEATS_PER_MINUTE,
"mdi:heart-pulse",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_HEART_RATE_MIN,
GetSleepSummaryField.HR_MIN,
"Minimum heart rate",
const.UOM_BEATS_PER_MINUTE,
"mdi:heart-pulse",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_LIGHT_DURATION_SECONDS,
GetSleepSummaryField.LIGHT_SLEEP_DURATION,
"Light sleep",
TIME_SECONDS,
"mdi:sleep",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_REM_DURATION_SECONDS,
GetSleepSummaryField.REM_SLEEP_DURATION,
"REM sleep",
TIME_SECONDS,
"mdi:sleep",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_RESPIRATORY_RATE_AVERAGE,
GetSleepSummaryField.RR_AVERAGE,
"Average respiratory rate",
const.UOM_BREATHS_PER_MINUTE,
None,
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_RESPIRATORY_RATE_MAX,
GetSleepSummaryField.RR_MAX,
"Maximum respiratory rate",
const.UOM_BREATHS_PER_MINUTE,
None,
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_RESPIRATORY_RATE_MIN,
GetSleepSummaryField.RR_MIN,
"Minimum respiratory rate",
const.UOM_BREATHS_PER_MINUTE,
None,
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_SCORE,
GetSleepSummaryField.SLEEP_SCORE,
"Sleep score",
const.SCORE_POINTS,
"mdi:medal",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_SNORING,
GetSleepSummaryField.SNORING,
"Snoring",
"",
None,
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_SNORING_EPISODE_COUNT,
GetSleepSummaryField.SNORING_EPISODE_COUNT,
"Snoring episode count",
"",
None,
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_WAKEUP_COUNT,
GetSleepSummaryField.WAKEUP_COUNT,
"Wakeup count",
const.UOM_FREQUENCY,
"mdi:sleep-off",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
WithingsAttribute(
Measurement.SLEEP_WAKEUP_DURATION_SECONDS,
GetSleepSummaryField.WAKEUP_DURATION,
"Wakeup time",
TIME_SECONDS,
"mdi:sleep-off",
SENSOR_DOMAIN,
False,
UpdateType.POLL,
),
# Webhook measurements.
WithingsAttribute(
Measurement.IN_BED,
NotifyAppli.BED_IN,
"In bed",
"",
"mdi:bed",
BINARY_SENSOR_DOMAIN,
True,
UpdateType.WEBHOOK,
),
]
WITHINGS_MEASUREMENTS_MAP: Dict[Measurement, WithingsAttribute] = {
attr.measurement: attr for attr in WITHINGS_ATTRIBUTES
}
WITHINGS_MEASURE_TYPE_MAP: Dict[
Union[NotifyAppli, GetSleepSummaryField, MeasureType], WithingsAttribute
] = {attr.measute_type: attr for attr in WITHINGS_ATTRIBUTES}
class ConfigEntryWithingsApi(AbstractWithingsApi):
"""Withing API that uses HA resources."""
def __init__(
self,
hass: HomeAssistant,
config_entry: ConfigEntry,
implementation: AbstractOAuth2Implementation,
):
"""Initialize object."""
self._hass = hass
self._config_entry = config_entry
self._implementation = implementation
self.session = OAuth2Session(hass, config_entry, implementation)
def _request(
self, path: str, params: Dict[str, Any], method: str = "GET"
) -> Dict[str, Any]:
"""Perform an async request."""
asyncio.run_coroutine_threadsafe(
self.session.async_ensure_token_valid(), self._hass.loop
)
access_token = self._config_entry.data["token"]["access_token"]
response = requests.request(
method,
f"{self.URL}/{path}",
params=params,
headers={"Authorization": f"Bearer {access_token}"},
)
return response.json()
def json_message_response(message: str, message_code: int) -> Response:
"""Produce common json output."""
return HomeAssistantView.json({"message": message, "code": message_code}, 200)
class WebhookAvailability(IntEnum):
"""Represents various statuses of webhook availability."""
SUCCESS = 0
CONNECT_ERROR = 1
HTTP_ERROR = 2
NOT_WEBHOOK = 3
class WebhookUpdateCoordinator:
"""Coordinates webhook data updates across listeners."""
def __init__(self, hass: HomeAssistant, user_id: int) -> None:
"""Initialize the object."""
self._hass = hass
self._user_id = user_id
self._listeners: List[CALLBACK_TYPE] = []
self.data: MeasurementData = {}
def async_add_listener(self, listener: CALLBACK_TYPE) -> Callable[[], None]:
"""Add a listener."""
self._listeners.append(listener)
@callback
def remove_listener() -> None:
self.async_remove_listener(listener)
return remove_listener
def async_remove_listener(self, listener: CALLBACK_TYPE) -> None:
"""Remove a listener."""
self._listeners.remove(listener)
def update_data(self, measurement: Measurement, value: Any) -> None:
"""Update the data object and notify listeners the data has changed."""
self.data[measurement] = value
self.notify_data_changed()
def notify_data_changed(self) -> None:
"""Notify all listeners the data has changed."""
for listener in self._listeners:
listener()
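# Illustrative usage of WebhookUpdateCoordinator (not part of the original file;
# `handle_update` is a hypothetical callback):
#   unsub = coordinator.async_add_listener(handle_update)
#   coordinator.update_data(Measurement.IN_BED, True)  # stores the value and calls handle_update
#   unsub()  # stop receiving further notifications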
class DataManager:
"""Manage withing data."""
def __init__(
self,
hass: HomeAssistant,
profile: str,
api: ConfigEntryWithingsApi,
user_id: int,
webhook_config: WebhookConfig,
):
"""Initialize the data manager."""
self._hass = hass
self._api = api
self._user_id = user_id
self._profile = profile
self._webhook_config = webhook_config
self._notify_subscribe_delay = datetime.timedelta(seconds=5)
self._notify_unsubscribe_delay = datetime.timedelta(seconds=1)
self._is_available = True
self._cancel_interval_update_interval: Optional[CALLBACK_TYPE] = None
self._cancel_configure_webhook_subscribe_interval: Optional[
CALLBACK_TYPE
] = None
self._api_notification_id = f"withings_{self._user_id}"
self.subscription_update_coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name="subscription_update_coordinator",
update_interval=timedelta(minutes=120),
update_method=self.async_subscribe_webhook,
)
self.poll_data_update_coordinator = DataUpdateCoordinator[
Dict[MeasureType, Any]
](
hass,
_LOGGER,
name="poll_data_update_coordinator",
update_interval=timedelta(minutes=120)
if self._webhook_config.enabled
else timedelta(minutes=10),
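            # Poll infrequently (every 2 hours) when the webhook is enabled, since
            # webhook pushes deliver most updates; otherwise poll every 10 minutes.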
update_method=self.async_get_all_data,
)
self.webhook_update_coordinator = WebhookUpdateCoordinator(
self._hass, self._user_id
)
self._cancel_subscription_update: Optional[Callable[[], None]] = None
self._subscribe_webhook_run_count = 0
@property
def webhook_config(self) -> WebhookConfig:
"""Get the webhook config."""
return self._webhook_config
@property
def user_id(self) -> int:
"""Get the user_id of the authenticated user."""
return self._user_id
@property
def profile(self) -> str:
"""Get the profile."""
return self._profile
def async_start_polling_webhook_subscriptions(self) -> None:
"""Start polling webhook subscriptions (if enabled) to reconcile their setup."""
self.async_stop_polling_webhook_subscriptions()
def empty_listener() -> None:
pass
self._cancel_subscription_update = (
self.subscription_update_coordinator.async_add_listener(empty_listener)
)
def async_stop_polling_webhook_subscriptions(self) -> None:
"""Stop polling webhook subscriptions."""
if self._cancel_subscription_update:
self._cancel_subscription_update()
self._cancel_subscription_update = None
async def _do_retry(self, func, attempts=3) -> Any:
"""Retry a function call.
Withings' API occasionally and incorrectly throws errors. Retrying the call tends to work.
"""
exception = None
for attempt in range(1, attempts + 1):
_LOGGER.debug("Attempt %s of %s", attempt, attempts)
try:
return await func()
except Exception as exception1: # pylint: disable=broad-except
await asyncio.sleep(0.1)
exception = exception1
continue
if exception:
raise exception
async def async_subscribe_webhook(self) -> None:
"""Subscribe the webhook to withings data updates."""
return await self._do_retry(self._async_subscribe_webhook)
async def _async_subscribe_webhook(self) -> None:
_LOGGER.debug("Configuring withings webhook")
# On first startup, perform a fresh re-subscribe. Withings stops pushing data
# if the webhook fails enough times but they don't remove the old subscription
        # config. This ensures the subscription is set up correctly and they start
# pushing again.
if self._subscribe_webhook_run_count == 0:
_LOGGER.debug("Refreshing withings webhook configs")
await self.async_unsubscribe_webhook()
self._subscribe_webhook_run_count += 1
# Get the current webhooks.
response = await self._hass.async_add_executor_job(self._api.notify_list)
subscribed_applis = frozenset(
[
profile.appli
for profile in response.profiles
if profile.callbackurl == self._webhook_config.url
]
)
# Determine what subscriptions need to be created.
ignored_applis = frozenset({NotifyAppli.USER})
to_add_applis = frozenset(
[
appli
for appli in NotifyAppli
if appli not in subscribed_applis and appli not in ignored_applis
]
)
# Subscribe to each one.
for appli in to_add_applis:
_LOGGER.debug(
"Subscribing %s for %s in %s seconds",
self._webhook_config.url,
appli,
self._notify_subscribe_delay.total_seconds(),
)
# Withings will HTTP HEAD the callback_url and needs some downtime
# between each call or there is a higher chance of failure.
await asyncio.sleep(self._notify_subscribe_delay.total_seconds())
await self._hass.async_add_executor_job(
self._api.notify_subscribe, self._webhook_config.url, appli
)
async def async_unsubscribe_webhook(self) -> None:
"""Unsubscribe webhook from withings data updates."""
return await self._do_retry(self._async_unsubscribe_webhook)
async def _async_unsubscribe_webhook(self) -> None:
# Get the current webhooks.
response = await self._hass.async_add_executor_job(self._api.notify_list)
# Revoke subscriptions.
for profile in response.profiles:
_LOGGER.debug(
"Unsubscribing %s for %s in %s seconds",
profile.callbackurl,
profile.appli,
self._notify_unsubscribe_delay.total_seconds(),
)
# Quick calls to Withings can result in the service returning errors. Give them
# some time to cool down.
await asyncio.sleep(self._notify_subscribe_delay.total_seconds())
await self._hass.async_add_executor_job(
self._api.notify_revoke, profile.callbackurl, profile.appli
)
async def async_get_all_data(self) -> Optional[Dict[MeasureType, Any]]:
"""Update all withings data."""
try:
return await self._do_retry(self._async_get_all_data)
except Exception as exception:
# User is not authenticated.
if isinstance(
exception, (UnauthorizedException, AuthFailedException)
) or NOT_AUTHENTICATED_ERROR.match(str(exception)):
context = {
const.PROFILE: self._profile,
"userid": self._user_id,
"source": "reauth",
}
# Check if reauth flow already exists.
flow = next(
iter(
flow
for flow in self._hass.config_entries.flow.async_progress()
if flow.context == context
),
None,
)
if flow:
return
# Start a reauth flow.
await self._hass.config_entries.flow.async_init(
const.DOMAIN,
context=context,
)
return
raise exception
async def _async_get_all_data(self) -> Optional[Dict[MeasureType, Any]]:
_LOGGER.info("Updating all withings data")
return {
**await self.async_get_measures(),
**await self.async_get_sleep_summary(),
}
async def async_get_measures(self) -> Dict[MeasureType, Any]:
"""Get the measures data."""
_LOGGER.debug("Updating withings measures")
response = await self._hass.async_add_executor_job(self._api.measure_get_meas)
# Sort from oldest to newest.
groups = sorted(
query_measure_groups(
response, MeasureTypes.ANY, MeasureGroupAttribs.UNAMBIGUOUS
),
key=lambda group: group.created.datetime,
reverse=False,
)
return {
WITHINGS_MEASURE_TYPE_MAP[measure.type].measurement: round(
float(measure.value * pow(10, measure.unit)), 2
)
for group in groups
for measure in group.measures
}
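    # Note on the conversion above: the Withings API reports each measure as an
    # integer `value` plus a power-of-ten exponent `unit`, so the real reading is
    # value * 10**unit (e.g. value=70500 with unit=-3 is 70.5), rounded to 2 decimals here.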
async def async_get_sleep_summary(self) -> Dict[MeasureType, Any]:
"""Get the sleep summary data."""
_LOGGER.debug("Updating withing sleep summary")
now = dt.utcnow()
yesterday = now - datetime.timedelta(days=1)
yesterday_noon = datetime.datetime(
yesterday.year,
yesterday.month,
yesterday.day,
12,
0,
0,
0,
datetime.timezone.utc,
)
def get_sleep_summary() -> SleepGetSummaryResponse:
return self._api.sleep_get_summary(
lastupdate=yesterday_noon,
data_fields=[
GetSleepSummaryField.BREATHING_DISTURBANCES_INTENSITY,
GetSleepSummaryField.DEEP_SLEEP_DURATION,
GetSleepSummaryField.DURATION_TO_SLEEP,
GetSleepSummaryField.DURATION_TO_WAKEUP,
GetSleepSummaryField.HR_AVERAGE,
GetSleepSummaryField.HR_MAX,
GetSleepSummaryField.HR_MIN,
GetSleepSummaryField.LIGHT_SLEEP_DURATION,
GetSleepSummaryField.REM_SLEEP_DURATION,
GetSleepSummaryField.RR_AVERAGE,
GetSleepSummaryField.RR_MAX,
GetSleepSummaryField.RR_MIN,
GetSleepSummaryField.SLEEP_SCORE,
GetSleepSummaryField.SNORING,
GetSleepSummaryField.SNORING_EPISODE_COUNT,
GetSleepSummaryField.WAKEUP_COUNT,
GetSleepSummaryField.WAKEUP_DURATION,
],
)
response = await self._hass.async_add_executor_job(get_sleep_summary)
# Set the default to empty lists.
raw_values: Dict[GetSleepSummaryField, List[int]] = {
field: [] for field in GetSleepSummaryField
}
# Collect the raw data.
for serie in response.series:
data = serie.data
for field in GetSleepSummaryField:
raw_values[field].append(data._asdict()[field.value])
values: Dict[GetSleepSummaryField, float] = {}
def average(data: List[int]) -> float:
return sum(data) / len(data)
def set_value(field: GetSleepSummaryField, func: Callable) -> None:
non_nones = [
value for value in raw_values.get(field, []) if value is not None
]
values[field] = func(non_nones) if non_nones else None
set_value(GetSleepSummaryField.BREATHING_DISTURBANCES_INTENSITY, average)
set_value(GetSleepSummaryField.DEEP_SLEEP_DURATION, sum)
set_value(GetSleepSummaryField.DURATION_TO_SLEEP, average)
set_value(GetSleepSummaryField.DURATION_TO_WAKEUP, average)
set_value(GetSleepSummaryField.HR_AVERAGE, average)
set_value(GetSleepSummaryField.HR_MAX, average)
set_value(GetSleepSummaryField.HR_MIN, average)
set_value(GetSleepSummaryField.LIGHT_SLEEP_DURATION, sum)
set_value(GetSleepSummaryField.REM_SLEEP_DURATION, sum)
set_value(GetSleepSummaryField.RR_AVERAGE, average)
set_value(GetSleepSummaryField.RR_MAX, average)
set_value(GetSleepSummaryField.RR_MIN, average)
set_value(GetSleepSummaryField.SLEEP_SCORE, max)
set_value(GetSleepSummaryField.SNORING, average)
set_value(GetSleepSummaryField.SNORING_EPISODE_COUNT, sum)
set_value(GetSleepSummaryField.WAKEUP_COUNT, sum)
set_value(GetSleepSummaryField.WAKEUP_DURATION, average)
return {
WITHINGS_MEASURE_TYPE_MAP[field].measurement: round(value, 4)
if value is not None
else None
for field, value in values.items()
}
async def async_webhook_data_updated(self, data_category: NotifyAppli) -> None:
"""Handle scenario when data is updated from a webook."""
_LOGGER.debug("Withings webhook triggered")
if data_category in {
NotifyAppli.WEIGHT,
NotifyAppli.CIRCULATORY,
NotifyAppli.SLEEP,
}:
await self.poll_data_update_coordinator.async_request_refresh()
elif data_category in {NotifyAppli.BED_IN, NotifyAppli.BED_OUT}:
self.webhook_update_coordinator.update_data(
Measurement.IN_BED, data_category == NotifyAppli.BED_IN
)
def get_attribute_unique_id(attribute: WithingsAttribute, user_id: int) -> str:
"""Get a entity unique id for a user's attribute."""
return f"withings_{user_id}_{attribute.measurement.value}"
async def async_get_entity_id(
hass: HomeAssistant, attribute: WithingsAttribute, user_id: int
) -> Optional[str]:
"""Get an entity id for a user's attribute."""
entity_registry: EntityRegistry = (
await hass.helpers.entity_registry.async_get_registry()
)
unique_id = get_attribute_unique_id(attribute, user_id)
entity_id = entity_registry.async_get_entity_id(
attribute.platform, const.DOMAIN, unique_id
)
if entity_id is None:
_LOGGER.error("Cannot find entity id for unique_id: %s", unique_id)<|fim▁hole|>
return entity_id
class BaseWithingsSensor(Entity):
"""Base class for withings sensors."""
def __init__(self, data_manager: DataManager, attribute: WithingsAttribute) -> None:
"""Initialize the Withings sensor."""
self._data_manager = data_manager
self._attribute = attribute
self._profile = self._data_manager.profile
self._user_id = self._data_manager.user_id
self._name = f"Withings {self._attribute.measurement.value} {self._profile}"
self._unique_id = get_attribute_unique_id(self._attribute, self._user_id)
self._state_data: Optional[Any] = None
@property
def should_poll(self) -> bool:
"""Return False to indicate HA should not poll for changes."""
return False
@property
def name(self) -> str:
"""Return the name of the sensor."""
return self._name
@property
def available(self) -> bool:
"""Return True if entity is available."""
if self._attribute.update_type == UpdateType.POLL:
return self._data_manager.poll_data_update_coordinator.last_update_success
if self._attribute.update_type == UpdateType.WEBHOOK:
return self._data_manager.webhook_config.enabled and (
self._attribute.measurement
in self._data_manager.webhook_update_coordinator.data
)
return True
@property
def unique_id(self) -> str:
"""Return a unique, Home Assistant friendly identifier for this entity."""
return self._unique_id
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of this entity, if any."""
return self._attribute.unit_of_measurement
@property
def icon(self) -> str:
"""Icon to use in the frontend, if any."""
return self._attribute.icon
@property
def entity_registry_enabled_default(self) -> bool:
"""Return if the entity should be enabled when first added to the entity registry."""
return self._attribute.enabled_by_default
@callback
def _on_poll_data_updated(self) -> None:
self._update_state_data(
self._data_manager.poll_data_update_coordinator.data or {}
)
@callback
def _on_webhook_data_updated(self) -> None:
self._update_state_data(
self._data_manager.webhook_update_coordinator.data or {}
)
def _update_state_data(self, data: MeasurementData) -> None:
"""Update the state data."""
self._state_data = data.get(self._attribute.measurement)
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Register update dispatcher."""
if self._attribute.update_type == UpdateType.POLL:
self.async_on_remove(
self._data_manager.poll_data_update_coordinator.async_add_listener(
self._on_poll_data_updated
)
)
self._on_poll_data_updated()
elif self._attribute.update_type == UpdateType.WEBHOOK:
self.async_on_remove(
self._data_manager.webhook_update_coordinator.async_add_listener(
self._on_webhook_data_updated
)
)
self._on_webhook_data_updated()
async def async_get_data_manager(
hass: HomeAssistant, config_entry: ConfigEntry
) -> DataManager:
"""Get the data manager for a config entry."""
hass.data.setdefault(const.DOMAIN, {})
hass.data[const.DOMAIN].setdefault(config_entry.entry_id, {})
config_entry_data = hass.data[const.DOMAIN][config_entry.entry_id]
if const.DATA_MANAGER not in config_entry_data:
profile = config_entry.data.get(const.PROFILE)
_LOGGER.debug("Creating withings data manager for profile: %s", profile)
config_entry_data[const.DATA_MANAGER] = DataManager(
hass,
profile,
ConfigEntryWithingsApi(
hass=hass,
config_entry=config_entry,
implementation=await config_entry_oauth2_flow.async_get_config_entry_implementation(
hass, config_entry
),
),
config_entry.data["token"]["userid"],
WebhookConfig(
id=config_entry.data[CONF_WEBHOOK_ID],
url=config_entry.data[const.CONF_WEBHOOK_URL],
enabled=config_entry.data[const.CONF_USE_WEBHOOK],
),
)
return config_entry_data[const.DATA_MANAGER]
def get_data_manager_by_webhook_id(
hass: HomeAssistant, webhook_id: str
) -> Optional[DataManager]:
"""Get a data manager by it's webhook id."""
return next(
iter(
[
data_manager
for data_manager in get_all_data_managers(hass)
if data_manager.webhook_config.id == webhook_id
]
),
None,
)
def get_all_data_managers(hass: HomeAssistant) -> Tuple[DataManager, ...]:
"""Get all configured data managers."""
return tuple(
[
config_entry_data[const.DATA_MANAGER]
for config_entry_data in hass.data[const.DOMAIN].values()
if const.DATA_MANAGER in config_entry_data
]
)
def async_remove_data_manager(hass: HomeAssistant, config_entry: ConfigEntry) -> None:
"""Remove a data manager for a config entry."""
del hass.data[const.DOMAIN][config_entry.entry_id][const.DATA_MANAGER]
async def async_create_entities(
hass: HomeAssistant,
entry: ConfigEntry,
create_func: Callable[[DataManager, WithingsAttribute], Entity],
platform: str,
) -> List[Entity]:
"""Create withings entities from config entry."""
data_manager = await async_get_data_manager(hass, entry)
return [
create_func(data_manager, attribute)
for attribute in get_platform_attributes(platform)
]
def get_platform_attributes(platform: str) -> Tuple[WithingsAttribute, ...]:
"""Get withings attributes used for a specific platform."""
return tuple(
[
attribute
for attribute in WITHINGS_ATTRIBUTES
if attribute.platform == platform
]
)
class WithingsLocalOAuth2Implementation(LocalOAuth2Implementation):
"""Oauth2 implementation that only uses the external url."""
@property
def redirect_uri(self) -> str:
"""Return the redirect uri."""
url = get_url(self.hass, allow_internal=False, prefer_cloud=True)
return f"{url}{AUTH_CALLBACK_PATH}"<|fim▁end|>
|
return None
|
<|file_name|>rules.py<|end_file_name|><|fim▁begin|>"""
The symbols and rules for the CFG of C. I generated these myself by hand, so
they're probably not perfectly correct.
"""
from rules_obj import *
from lexer import *
import tokens
### Symbols ###
# Most symbols are either self-explanatory, or best understood by examining the
# rules below to see how they're used.
S = Symbol("S")
main_setup = Symbol("main_setup") #TODO: is this necessary?
# `statements` is a bunch of `statement`s
statements = Symbol("statements")
# `statement` is a single C statement, semicolon included
statement = Symbol("statement")
# a generic expression
E = Symbol("E")
declare_separator = Symbol("declare_separator")
declare_type = Symbol("declare_type")
declare_expression = Symbol("declare_expression");
arr_start = Symbol("arr_start")
arr_end = Symbol("arr_end")
arr_list = Symbol("arr_list")
if_start = Symbol("if_start");
if_statement = Symbol("if_statement");
else_statement = Symbol("else_statement");
while_start = Symbol("while_start")
while_statement = Symbol("while_statement")
for_start = Symbol("for_start")
for1 = Symbol("for1")
for2 = Symbol("for2")
for3 = Symbol("for3")
for_expr = Symbol("for_expr")
arg_start = Symbol("arg_start")
func_dec = Symbol("func_dec")
func_def = Symbol("func_def")
func_call_start = Symbol("func_call_start")
### Rules ###
# After adding a rule, make sure to add it to the rules list at the bottom!
# something that stands alone as a program, plus a function definition or
# declaration, can also stand alone as a program.
main_func_dec_cont = Rule(S, [S, func_dec])
main_func_def_cont = Rule(S, [S, func_def])
main_func_dec = Rule(S, [func_dec])
main_func_def = Rule(S, [func_def])
# make a `statements` symbol by extending another `statements` symbol
statements_cont = Rule(statements, [statements,
statement])
# make a single `statement` symbol into a `statements` symbol
statements_end = Rule(statements, [statement])
# return statement
return_form = Rule(statement, [tokens.return_command,
E,
tokens.semicolon])
# a print statement
# The print statement is not valid C. I added it for ease of use, however, as
# I do not foresee this compiler being able to include stdio.h anytime soon.
print_form = Rule(statement, [tokens.print_command,
E,
tokens.semicolon])
# a declaration of the form int;
useless_declaration = Rule(statement, [Token("type"), tokens.semicolon])
# a declaration of the form `int a;` or `int a, b = 0;`
real_declaration = Rule(statement, [declare_expression, tokens.semicolon])
# the type part of a declaration, along with any pointers on the first variable
declare_type_base = Rule(declare_type, [Token("type")])
declare_type_cont = Rule(declare_type, [declare_type, tokens.aster])
# used to separate declarations. all these are declare_separators:
# ,
# ,*
# , **
#
declare_separator_base = Rule(declare_separator, [tokens.comma])
declare_separator_cont = Rule(declare_separator, [declare_separator, tokens.aster])
# the base of a declaration, like `int hello` or `int* hello`.
base_declare = Rule(declare_expression, [declare_type, Token("name")])
# a non-array declaration with an assignment, like `int hello = 4` or `int* hello = &p`.
assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, E], 49)
# an array declaration with assignment, like `int hi[4] = {1, 2, 3, 4}`.
# Note--I imagine a better parser would catch things like `int hi = {1, 3}`.
# Mine, however, catches these errors at the code generation stage.
arr_assign_declare = Rule(declare_expression, [declare_expression, tokens.equal, arr_list], 49)
# Converts things like `int a, b` into a fresh declare_expression to chain declarations
cont_declare = Rule(declare_expression, [declare_expression, declare_separator, Token("name")])
# Defines `int a[5]` as a valid declare expression
array_num_declare = Rule(declare_expression, [declare_expression,
tokens.open_sq_bracket,
E,
tokens.close_sq_bracket])
# Defines `int a[]` as a valid declare expression
array_nonum_declare = Rule(declare_expression, [declare_expression,
tokens.open_sq_bracket,
tokens.close_sq_bracket])
E_num = Rule(E, [Token("integer")])
E_parens = Rule(E, [tokens.open_paren,
E,
tokens.close_paren])
# Badly named--E_add can be binary addition or subtraction
E_add = Rule(E, [E,
Token("addop"),
E], 85)
E_mult = Rule(E, [E,
tokens.aster,
E], 90)
E_div = Rule(E, [E,
tokens.slash,
E], 90)
E_mod = Rule(E, [E,
tokens.percent,
E], 90)
E_boolean_and = Rule(E, [E,
tokens.logic_and,
E], 65)
E_boolean_or = Rule(E, [E,
tokens.logic_or,
E], 60)
E_eq_compare = Rule(E, [E,
Token("eq_compare"),
E], 70)
E_compare = Rule(E, [E,
Token("compare"),
E], 75)
# Again, badly named. E_neg can be either unary addition or subtraction
E_neg = Rule(E, [Token("addop"),
E], 95)
# Note this covers all of `a = 5`, `a *= 5`, `a /= 5`, etc.
# We give this rule a priority of 49, which is less than 50 (the priority of
# the assignment symbols). This makes it right-associative.
E_equal = Rule(E, [E,
Token("assignment"),
E], 49)
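# For example, with this right associativity "a = b = 5" groups as "a = (b = 5)",
# so 5 is assigned to b and then to a.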
E_boolean_not = Rule(E, [tokens.logic_not, E], 95)
# Covers both a++ and a--
E_inc_after = Rule(E, [E,
Token("crement")], 100)
# Covers both ++a and --a
E_inc_before = Rule(E, [Token("crement"),
E], 95)
E_point = Rule(E, [tokens.aster, E], 95)
E_deref = Rule(E, [tokens.amper, E], 95)
# Calling a function like `f()`
E_func_noarg = Rule(E, [E, tokens.open_paren, tokens.close_paren])
# The start of a function call and first argument, like `f(1`
E_func_call_start = Rule(func_call_start, [E, tokens.open_paren, E], 0)
# Chaining more arguments onto the function call
E_func_call_cont = Rule(func_call_start, [func_call_start, tokens.comma, E], 0)
# Completing the function call
E_func_call_end = Rule(E, [func_call_start, tokens.close_paren])
# Array referencing, like `a[4]`
E_array = Rule(E, [E, tokens.open_sq_bracket, E, tokens.close_sq_bracket], 100)
E_var = Rule(E, [Token("name")])
E_form = Rule(statement, [E, tokens.semicolon])
# We have to separate out the start so (E) doesn't reduce to E in `if(E)`
if_start_form = Rule(if_start, [tokens.if_keyword,
tokens.open_paren])
# an if statement like `if(E) {}`
if_form_brackets = Rule(if_statement, [if_start,
E,
tokens.close_paren,
tokens.open_bracket,
tokens.close_bracket])
# a one line if statement like `if(E) a = 5;`
# it's OK to use "statements" here because statement -> statements immediately,
# so then this rule will apply right away
if_form_oneline = Rule(if_statement, [if_start,
E,
tokens.close_paren,
statements])
# the most common if form, like `if(E) {a = 5;}`
if_form_main = Rule(if_statement, [if_start,
E,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
# Same things, but for else
else_form_brackets = Rule(else_statement, [tokens.else_keyword,
tokens.open_bracket,
tokens.close_bracket])
else_form_oneline = Rule(else_statement, [tokens.else_keyword,
statements])
else_form_main = Rule(else_statement, [tokens.else_keyword,
tokens.open_bracket,
statements,
tokens.close_bracket])
# We use a priority here so if an "else" follows an "if_statement", the parser
# won't apply the if_form_general rule (instead of the correct ifelse_form_general)
if_form_general = Rule(statement, [if_statement], 200)
ifelse_form_general = Rule(statement, [if_statement, else_statement])
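# For example, in "if (x) a = 1; else a = 2;" the parser must keep the if_statement
# around until the else is seen, so ifelse_form_general (not if_form_general) fires.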
break_form = Rule(statement, [tokens.break_keyword, tokens.semicolon])
cont_form = Rule(statement, [tokens.cont_keyword, tokens.semicolon])
# We have to separate out the start so (E) doesn't reduce to E
while_start_form = Rule(while_start, [tokens.while_keyword, tokens.open_paren])
# Same as if statement rules
while_form_brackets = Rule(statement, [while_start,
E,
tokens.close_paren,
tokens.open_bracket,
tokens.close_bracket])
while_form_oneline = Rule(statement, [while_start,
E,
tokens.close_paren,
statements])
while_form_main = Rule(statement, [while_start,
E,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
# for statements
for_start_form = Rule(for_start, [tokens.for_keyword, tokens.open_paren])
for1_form = Rule(for1, [for_start, statements])
# The `statements` here better have a tree of the form:
# statements -> statement -> E, semicolon
# A better parser would probably check this while parsing, but I check during
# code gen.
for2_form = Rule(for2, [for1, statements])
for_expr_form = Rule(for_expr, [for2, E, tokens.close_paren])
for_expr_form_empty = Rule(for_expr, [for2, tokens.close_paren])
# Same as if statement rules
for_form_empty = Rule(statement, [for_expr,
tokens.semicolon])
for_form_brackets = Rule(statement, [for_expr,
tokens.open_bracket,
tokens.close_bracket])
for_form_oneline = Rule(statement, [for_expr,
statements])
for_form_main = Rule(statement, [for_expr,
tokens.open_bracket,
statements,
tokens.close_bracket])
# Array initializer with one element, like `{1}`
arr_list_one = Rule(arr_list, [tokens.open_bracket, E, tokens.close_bracket])
# Array initializer with no elements, like `{}`
arr_list_none = Rule(arr_list, [tokens.open_bracket, tokens.close_bracket])
# Start of array initializer and first element, like `{1,`
arr_list_start = Rule(arr_start, [tokens.open_bracket, E, tokens.comma])
# Continuing array initializer, like `{1, 2,`
arr_list_cont = Rule(arr_start, [arr_start, E, tokens.comma])
# Total array initializer, like `{1, 2, 3}`
arr_list_total = Rule(arr_list, [arr_start, arr_end])
# Array initializer end, like `3}`
arr_list_end = Rule(arr_end, [E, tokens.close_bracket])
# Argument list for defining/declaring functions
base_arg_form = Rule(arg_start, [declare_expression, # should have children [declare_type, name]
tokens.open_paren,
declare_expression])
cont_arg_form = Rule(arg_start, [arg_start,
tokens.comma,
declare_expression]) # should have kids [declare_type, name]
func_dec_form = Rule(func_dec, [arg_start, tokens.close_paren, tokens.semicolon])
func_def_form = Rule(func_def, [arg_start,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
noarg_func_dec_form = Rule(func_dec, [declare_expression,
tokens.open_paren,
tokens.close_paren,
tokens.semicolon])
noarg_func_def_form = Rule(func_def, [declare_expression,
tokens.open_paren,
tokens.close_paren,
tokens.open_bracket,
statements,
tokens.close_bracket])
semicolon_form = Rule(statement, [tokens.semicolon])
# List of all the rules to apply. Applied in the listed order.
# In general, try to list rules above in the same order as they're listed here.
rules = [main_func_def_cont,
main_func_dec_cont,
main_func_def,
main_func_dec,
statements_cont,
statements_end,
return_form,
print_form,
useless_declaration,
real_declaration,
declare_type_base,
declare_type_cont,
declare_separator_base,
declare_separator_cont,
base_declare,
assign_declare,
arr_assign_declare,
cont_declare,
array_num_declare,
array_nonum_declare,
E_num,
E_parens,
E_add,
E_mult,
E_div,
E_mod,
E_boolean_and,
E_boolean_or,
E_eq_compare,
E_compare,
E_neg,
E_equal,
E_boolean_not,
E_inc_after,
E_inc_before,
E_point,
E_deref,
E_func_noarg,
E_func_call_start,
E_func_call_cont,
E_func_call_end,
E_array,
E_var,
E_form,
if_start_form,
if_form_brackets,
if_form_oneline,
if_form_main,
if_form_general,
else_form_brackets,
else_form_oneline,
else_form_main,
ifelse_form_general,
break_form,
cont_form,<|fim▁hole|> while_start_form,
while_form_brackets,
while_form_oneline,
while_form_main,
for_start_form,
for1_form,
for2_form,
for_expr_form,
for_expr_form_empty,
for_form_brackets,
for_form_oneline,
for_form_main,
arr_list_one,
arr_list_none,
arr_list_start,
arr_list_cont,
arr_list_total,
arr_list_end,
base_arg_form,
cont_arg_form,
func_dec_form,
func_def_form,
noarg_func_dec_form,
noarg_func_def_form,
semicolon_form]<|fim▁end|>
| |
<|file_name|>mxCompositeLayout.js<|end_file_name|><|fim▁begin|>/**
* $Id: mxCompositeLayout.js,v 1.11 2010-01-02 09:45:15 gaudenz Exp $
* Copyright (c) 2006-2010, JGraph Ltd
*/
/**
* Class: mxCompositeLayout
*
 * Allows composing multiple layouts into a single layout. The master layout
 * is the layout that handles move operations if a layout other than the first
 * element in <layouts> should be used. The <master> layout is not executed as
* the code assumes that it is part of <layouts>.
*
* Example:
* (code)
* var first = new mxFastOrganicLayout(graph);
* var second = new mxParallelEdgeLayout(graph);
* var layout = new mxCompositeLayout(graph, [first, second], first);
* layout.execute(graph.getDefaultParent());
* (end)
*
* Constructor: mxCompositeLayout
*
* Constructs a new layout using the given layouts. The graph instance is
* required for creating the transaction that contains all layouts.
*
* Arguments:
*
* graph - Reference to the enclosing <mxGraph>.
* layouts - Array of <mxGraphLayouts>.
* master - Optional layout that handles moves. If no layout is given then
* the first layout of the above array is used to handle moves.
*/
function mxCompositeLayout(graph, layouts, master)
{
mxGraphLayout.call(this, graph);
this.layouts = layouts;
this.master = master;<|fim▁hole|> * Extends mxGraphLayout.
*/
mxCompositeLayout.prototype = new mxGraphLayout();
mxCompositeLayout.prototype.constructor = mxCompositeLayout;
/**
* Variable: layouts
*
* Holds the array of <mxGraphLayouts> that this layout contains.
*/
mxCompositeLayout.prototype.layouts = null;
/**
 * Variable: master
*
* Reference to the <mxGraphLayouts> that handles moves. If this is null
* then the first layout in <layouts> is used.
*/
mxCompositeLayout.prototype.master = null;
/**
* Function: moveCell
*
* Implements <mxGraphLayout.moveCell> by calling move on <master> or the first
* layout in <layouts>.
*/
mxCompositeLayout.prototype.moveCell = function(cell, x, y)
{
if (this.master != null)
{
this.master.move.apply(this.master, arguments);
}
else
{
this.layouts[0].move.apply(this.layouts[0], arguments);
}
};
/**
* Function: execute
*
* Implements <mxGraphLayout.execute> by executing all <layouts> in a
* single transaction.
*/
mxCompositeLayout.prototype.execute = function(parent)
{
var model = this.graph.getModel();
model.beginUpdate();
try
{
for (var i = 0; i < this.layouts.length; i++)
{
this.layouts[i].execute.apply(this.layouts[i], arguments);
}
}
finally
{
model.endUpdate();
}
};<|fim▁end|>
|
};
/**
|
<|file_name|>review.server.model.js<|end_file_name|><|fim▁begin|>'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),<|fim▁hole|>
/**
* Review Schema
*/
var ReviewSchema = new Schema({
content: {
type: String,
default: '',
trim: true
},
contentHTML: {
type: String,
default: ''
},
name: {
type: String,
default: '',
trim: true
},
score: {
type: Number,
default: 5,
required: 'Must rate the game out of 5',
min: [0, 'Score must be at least 0'],
max: [5, 'Score cannot be higher than 5']
},
created: {
type: Date,
default: Date.now
},
user: {
type: Schema.ObjectId,
ref: 'User'
},
gameId: {
type: String,
required: 'Game for review required'
},
game: {
type: Schema.ObjectId,
ref: 'Game'
},
triaged: {
type: Boolean,
default: false
},
liked: {
type: Number,
default: 0
},
disliked: {
type: Number,
default: 0
},
reports: [{
type: String,
enum: [
'Spam',
'Vote Manipulation',
'Personal Information',
'Troll',
'Harrassment'
]
}]
});
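// Illustrative usage (not part of the original file):
//   var Review = mongoose.model('Review');
//   new Review({ content: 'Solid platformer', score: 4, gameId: 'abc123' }).save();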
mongoose.model('Review', ReviewSchema);<|fim▁end|>
|
Schema = mongoose.Schema;
|
<|file_name|>ComSchemaName.cpp<|end_file_name|><|fim▁begin|>/**********************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
**********************************************************************/
/* -*-C++-*-
*****************************************************************************
*
* File: ComSchemaName.C
* Description: methods for class ComSchemaName
*
* Created: 9/12/95
* Language: C++
*
*
*
*****************************************************************************
*/
#define SQLPARSERGLOBALS_NADEFAULTS // first
#include <string.h>
#include "ComASSERT.h"
#include "ComMPLoc.h"
#include "ComSchemaName.h"
#include "ComSqlText.h"
#include "NAString.h"
#include "SqlParserGlobals.h" // last
//
// constructors
//
//
// default constructor
//
ComSchemaName::ComSchemaName ()
{
}
//
// initializing constructor
//
ComSchemaName::ComSchemaName (const NAString &externalSchemaName)
{
scan(externalSchemaName);
}
//
// initializing constructor
//
ComSchemaName::ComSchemaName (const NAString &externalSchemaName,
size_t &bytesScanned)
{
scan(externalSchemaName, bytesScanned);
}
//
// initializing constructor
//
ComSchemaName::ComSchemaName (const ComAnsiNamePart &schemaNamePart)
: schemaNamePart_ (schemaNamePart)
{
}
//
// initializing constructor
//
ComSchemaName::ComSchemaName (const ComAnsiNamePart &catalogNamePart,
const ComAnsiNamePart &schemaNamePart)
: catalogNamePart_ (catalogNamePart)
, schemaNamePart_ (schemaNamePart)
{
// "cat." is invalid
if (NOT catalogNamePart_.isEmpty() AND schemaNamePart_.isEmpty())
clear();
}
//
// virtual destructor
//
ComSchemaName::~ComSchemaName ()
{
}
//
// assignment operator
//
ComSchemaName &ComSchemaName::operator= (const NAString &rhsSchemaName)
{
clear();
scan(rhsSchemaName);
return *this;
}
//
// accessors
//
const NAString &
ComSchemaName::getCatalogNamePartAsAnsiString(NABoolean) const
{
return catalogNamePart_.getExternalName();
}
const NAString &
ComSchemaName::getSchemaNamePartAsAnsiString(NABoolean) const
{
return schemaNamePart_.getExternalName();
}
NAString
ComSchemaName::getExternalName(NABoolean) const<|fim▁hole|> Int32 ok = 0;
#endif
if (NOT schemaNamePart_.isEmpty())
{
if (NOT catalogNamePart_.isEmpty())
{
#ifndef NDEBUG
ok = 1;
#endif
extSchemaName = getCatalogNamePartAsAnsiString() + "." +
getSchemaNamePartAsAnsiString();
}
else
{
extSchemaName = getSchemaNamePartAsAnsiString();
}
}
#ifndef NDEBUG
if (!ok)
cerr << "Warning: incomplete ComSchemaName " << extSchemaName << endl;
#endif
return extSchemaName;
}
//
// mutators
//
//
// Resets data members
//
void ComSchemaName::clear()
{
catalogNamePart_.clear();
schemaNamePart_.clear();
}
//
// private methods
//
//
// Scans (parses) input external-format schema name.
//
NABoolean
ComSchemaName::scan(const NAString &externalSchemaName)
{
size_t bytesScanned;
return scan(externalSchemaName, bytesScanned);
}
//
// Scans (parses) input external-format schema name.
//
// This method assumes that the parameter externalSchemaName only
// contains the external-format schema name. The syntax of an
// schema name is
//
// [ <catalog-name-part> ] . <schema-name-part>
//
// A schema name part must be specified; the catalog name part is optional.
//
// The method returns the number of bytes scanned via the parameter
// bytesScanned. If the scanned schema name is illegal, bytesScanned
// contains the number of bytes examined when the name is determined
// to be invalid.
//
// If the specified external-format schema name is valid, this method
// returns TRUE and saves the parsed ANSI SQL name part into data
// members catalogNamePart_ and schemaNamePart_; otherwise, it returns
// FALSE and does not changes the contents of the data members.
//
NABoolean
ComSchemaName::scan(const NAString &externalSchemaName,
size_t &bytesScanned)
{
size_t count;
size_t externalSchemaNameLen = externalSchemaName.length();
bytesScanned = 0;
#define COPY_VALIDATED_STRING(x) \
ComAnsiNamePart(x, ComAnsiNamePart::INTERNAL_FORMAT)
if (( SqlParser_Initialized() && SqlParser_NAMETYPE == DF_NSK) ||
(!SqlParser_Initialized() && *externalSchemaName.data() == '\\')) {
ComMPLoc loc(externalSchemaName);
switch (loc.getFormat()) {
case ComMPLoc::SUBVOL:
catalogNamePart_ = COPY_VALIDATED_STRING(loc.getSysDotVol());
schemaNamePart_ = COPY_VALIDATED_STRING(loc.getSubvolName());
bytesScanned = externalSchemaNameLen;
return TRUE;
case ComMPLoc::FILE:
if (!loc.hasSubvolName()) {
catalogNamePart_ = "";
schemaNamePart_ = COPY_VALIDATED_STRING(loc.getFileName());
bytesScanned = externalSchemaNameLen;
return TRUE;
}
}
}
// Each ComAnsiNamePart ctor below must be preceded by "count = 0;"
// -- see ComAnsiNamePart.cpp, and for a better scan implementation,
// see ComObjectName::scan() + ComObjectName(bytesScanned) ctor.
// ---------------------------------------------------------------------
// Scan the leftmost ANSI SQL name part.
// ---------------------------------------------------------------------
count = 0;
ComAnsiNamePart part1(externalSchemaName, count);
bytesScanned += count;
if (NOT part1.isValid())
return FALSE;
if (bytesScanned >= externalSchemaNameLen)
{
ComASSERT(bytesScanned == externalSchemaNameLen);
schemaNamePart_ = part1;
return TRUE; // "sch"
}
// Get past the period separator
if (NOT ComSqlText.isPeriod(externalSchemaName[bytesScanned++]))
return FALSE;
// ---------------------------------------------------------------------
// Scan the last ANSI SQL name part
// ---------------------------------------------------------------------
#pragma nowarn(1506) // warning elimination
Int32 remainingLen = externalSchemaNameLen - bytesScanned;
#pragma warn(1506) // warning elimination
NAString remainingName = externalSchemaName(bytesScanned, remainingLen);
count = 0;
ComAnsiNamePart part2(remainingName, count);
bytesScanned += count;
if (NOT part2.isValid())
return FALSE;
if (bytesScanned == externalSchemaNameLen)
{
catalogNamePart_ = part1;
schemaNamePart_ = part2;
return TRUE; // "cat.sch"
}
// The specified external-format object name contains some extra
// trailing characters -- illegal.
//
return FALSE;
} // ComSchemaName::scan()
void ComSchemaName::setDefinitionSchemaName (const COM_VERSION version)
{
}<|fim▁end|>
|
{
NAString extSchemaName;
#ifndef NDEBUG
|
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
// Add our custom tasks.
grunt.loadTasks('../../../tasks');
// Project configuration.
grunt.initConfig({
mochaTest: {
options: {
reporter: 'spec',
grep: 'tests that match grep',
invert: true
},
all: {
src: ['*.js']
}
}
<|fim▁hole|>};<|fim▁end|>
|
});
// Default task.
grunt.registerTask('default', ['mochaTest']);
|
<|file_name|>hello.py<|end_file_name|><|fim▁begin|>from helper import greeting
<|fim▁hole|><|fim▁end|>
|
greeting('Hello')
|
<|file_name|>diff.py<|end_file_name|><|fim▁begin|>"""Test diff."""
# --- import -------------------------------------------------------------------------------------
import numpy as np
import WrightTools as wt
# --- test ---------------------------------------------------------------------------------------
def test_ascending_1():
x = np.linspace(0, 10, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y)
assert np.all((np.abs(d - np.cos(x)) < 0.0001)[:-1])
def test_ascending_2():
x = np.linspace(0, 10, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y, 2)
assert np.all((np.abs(d + np.sin(x)) < 0.0001)[1:-2])
def test_ascending_3():
x = np.linspace(0, 10, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y, 3)
assert np.all((np.abs(d + np.cos(x)) < 0.0001)[2:-3])
def test_ascending_4():
x = np.linspace(0, 10, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y, 4)<|fim▁hole|> x = np.linspace(10, 0, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y)
assert np.all((np.abs(d - np.cos(x)) < 0.0001)[1:-1])
def test_descending_3():
x = np.linspace(10, 0, 1000)
y = np.sin(x)
d = wt.kit.diff(x, y, 3)
assert np.all((np.abs(d + np.cos(x)) < 0.0001)[3:-3])<|fim▁end|>
|
assert np.all((np.abs(d - np.sin(x)) < 0.0001)[3:-4])
def test_descending_1():
|
<|file_name|>test_projection.py<|end_file_name|><|fim▁begin|>from __future__ import print_function, absolute_import, division
import warnings
import pytest
import numpy as np
from astropy import units as u
from astropy.wcs import WCS
from astropy.io import fits
from radio_beam import Beam, Beams
from .helpers import assert_allclose
from .test_spectral_cube import cube_and_raw
from ..spectral_cube import SpectralCube
from ..masks import BooleanArrayMask
from ..lower_dimensional_structures import (Projection, Slice, OneDSpectrum,
VaryingResolutionOneDSpectrum)
from ..utils import SliceWarning, WCSCelestialError, BeamUnitsError
from . import path
# needed for regression in numpy
import sys
try:
from astropy.utils.compat import NUMPY_LT_1_22
except ImportError:
# if astropy is an old version, we'll just skip the test
# (this is only used in one place)
NUMPY_LT_1_22 = False
# set up for parametrization
LDOs = (Projection, Slice, OneDSpectrum)
LDOs_2d = (Projection, Slice,)
two_qty_2d = np.ones((2,2)) * u.Jy
twelve_qty_2d = np.ones((12,12)) * u.Jy
two_qty_1d = np.ones((2,)) * u.Jy
twelve_qty_1d = np.ones((12,)) * u.Jy
data_two = (two_qty_2d, two_qty_2d, two_qty_1d)
data_twelve = (twelve_qty_2d, twelve_qty_2d, twelve_qty_1d)
data_two_2d = (two_qty_2d, two_qty_2d,)
data_twelve_2d = (twelve_qty_2d, twelve_qty_2d,)
def load_projection(filename):
hdu = fits.open(filename)[0]
proj = Projection.from_hdu(hdu)
return proj, hdu
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs_2d, data_two_2d))
def test_slices_of_projections_not_projections(LDO, data):
# slices of projections that have <2 dimensions should not be projections
p = LDO(data, copy=False)
assert not isinstance(p[0,0], LDO)
assert not isinstance(p[0], LDO)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs_2d, data_twelve_2d))
def test_copy_false(LDO, data):
# copy the data so we can manipulate inplace without affecting other tests
image = data.copy()
p = LDO(image, copy=False)
image[3,4] = 2 * u.Jy
assert_allclose(p[3,4], 2 * u.Jy)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_write(LDO, data, tmpdir):
p = LDO(data)
p.write(tmpdir.join('test.fits').strpath)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs_2d, data_twelve_2d))
def test_preserve_wcs_to(LDO, data):
# regression for #256
image = data.copy()
p = LDO(image, copy=False)
image[3,4] = 2 * u.Jy
p2 = p.to(u.mJy)
assert_allclose(p[3,4], 2 * u.Jy)
assert_allclose(p[3,4], 2000 * u.mJy)
assert p2.wcs == p.wcs
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_multiplication(LDO, data):
# regression: 265
p = LDO(data, copy=False)
p2 = p * 5
assert p2.unit == u.Jy
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value == 5)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_unit_division(LDO, data):
# regression: 265
image = data
p = LDO(image, copy=False)
p2 = p / u.beam
assert p2.unit == u.Jy/u.beam
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs_2d, data_twelve_2d))
def test_isnan(LDO, data):
# Check that np.isnan strips units
image = data.copy()
image[5,6] = np.nan
p = LDO(image, copy=False)
mask = np.isnan(p)
assert mask.sum() == 1
assert not hasattr(mask, 'unit')
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_self_arith(LDO, data):
image = data
p = LDO(image, copy=False)
p2 = p + p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==2)
p2 = p - p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==0)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_self_arith_with_beam(LDO, data):
exp_beam = Beam(1.0 * u.arcsec)
image = data
p = LDO(image, copy=False)
p = p.with_beam(exp_beam)
p2 = p + p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==2)
assert p2.beam == exp_beam
p2 = p - p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==0)
assert p2.beam == exp_beam
@pytest.mark.xfail(raises=ValueError, strict=True)
def test_VRODS_wrong_beams_shape():
'''
Check that passing Beams with a different shape than the data
is caught.
'''
exp_beams = Beams(np.arange(1, 4) * u.arcsec)
p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False,
beams=exp_beams)
def test_VRODS_with_beams():
exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)
p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False, beams=exp_beams)
assert (p.beams == exp_beams).all()
new_beams = Beams(np.arange(2, twelve_qty_1d.size + 2) * u.arcsec)
p = p.with_beams(new_beams)
assert np.all(p.beams == new_beams)
def test_VRODS_slice_with_beams():
exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)
p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False,
wcs=WCS(naxis=1),
beams=exp_beams)
assert np.all(p[:5].beams == exp_beams[:5])
def test_VRODS_arith_with_beams():
exp_beams = Beams(np.arange(1, twelve_qty_1d.size + 1) * u.arcsec)
p = VaryingResolutionOneDSpectrum(twelve_qty_1d, copy=False, beams=exp_beams)
p2 = p + p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==2)
assert np.all(p2.beams == exp_beams)
p2 = p - p
assert hasattr(p2, '_wcs')
assert p2.wcs == p.wcs
assert np.all(p2.value==0)
assert np.all(p2.beams == exp_beams)
def test_onedspectrum_specaxis_units():
test_wcs = WCS(naxis=1)
test_wcs.wcs.cunit = ["m/s"]
test_wcs.wcs.ctype = ["VELO-LSR"]
p = OneDSpectrum(twelve_qty_1d, wcs=test_wcs)
assert p.spectral_axis.unit == u.Unit("m/s")
def test_onedspectrum_with_spectral_unit():
test_wcs = WCS(naxis=1)
test_wcs.wcs.cunit = ["m/s"]
test_wcs.wcs.ctype = ["VELO-LSR"]
p = OneDSpectrum(twelve_qty_1d, wcs=test_wcs)
p_new = p.with_spectral_unit(u.km/u.s)
assert p_new.spectral_axis.unit == u.Unit("km/s")
np.testing.assert_equal(p_new.spectral_axis.value,
1e-3*p.spectral_axis.value)
def test_onedspectrum_input_mask_type():
test_wcs = WCS(naxis=1)
test_wcs.wcs.cunit = ["m/s"]
test_wcs.wcs.ctype = ["VELO-LSR"]
np_mask = np.ones(twelve_qty_1d.shape, dtype=bool)
np_mask[1] = False
bool_mask = BooleanArrayMask(np_mask, wcs=test_wcs,
shape=np_mask.shape)
# numpy array
p = OneDSpectrum(twelve_qty_1d, wcs=test_wcs,
mask=np_mask)
assert (p.mask.include() == bool_mask.include()).all()
# MaskBase
p = OneDSpectrum(twelve_qty_1d, wcs=test_wcs,
mask=bool_mask)
assert (p.mask.include() == bool_mask.include()).all()
# No mask
ones_mask = BooleanArrayMask(np.ones(twelve_qty_1d.shape, dtype=bool),
wcs=test_wcs, shape=np_mask.shape)
p = OneDSpectrum(twelve_qty_1d, wcs=test_wcs,
mask=None)
assert (p.mask.include() == ones_mask.include()).all()
def test_slice_tricks():
test_wcs_1 = WCS(naxis=1)
test_wcs_2 = WCS(naxis=2)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
im = Slice(twelve_qty_2d, wcs=test_wcs_2)
with warnings.catch_warnings(record=True) as w:
new = spec[:,None,None] * im[None,:,:]
assert new.ndim == 3
# two warnings because we're doing BOTH slices!
assert len(w) == 2
assert w[0].category == SliceWarning
with warnings.catch_warnings(record=True) as w:
new = spec.array[:,None,None] * im.array[None,:,:]
assert new.ndim == 3
assert len(w) == 0
def test_array_property():
test_wcs_1 = WCS(naxis=1)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
arr = spec.array
# these are supposed to be the same object, but the 'is' tests fails!
assert spec.array.data == spec.data
assert isinstance(arr, np.ndarray)
assert not isinstance(arr, u.Quantity)
def test_quantity_property():
test_wcs_1 = WCS(naxis=1)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
arr = spec.quantity
# these are supposed to be the same object, but the 'is' tests fails!
assert spec.array.data == spec.data
assert isinstance(arr, u.Quantity)
assert not isinstance(arr, OneDSpectrum)
def test_projection_with_beam(data_55):
exp_beam = Beam(1.0 * u.arcsec)
proj, hdu = load_projection(data_55)
# uses from_hdu, which passes beam as kwarg
assert proj.beam == exp_beam
assert proj.meta['beam'] == exp_beam
# load beam from meta
exp_beam = Beam(1.5 * u.arcsec)
meta = {"beam": exp_beam}
new_proj = Projection(hdu.data, wcs=proj.wcs, meta=meta)
assert new_proj.beam == exp_beam
assert new_proj.meta['beam'] == exp_beam
# load beam from given header
exp_beam = Beam(2.0 * u.arcsec)
header = hdu.header.copy()
header = exp_beam.attach_to_header(header)
new_proj = Projection(hdu.data, wcs=proj.wcs, header=header,
read_beam=True)
assert new_proj.beam == exp_beam
assert new_proj.meta['beam'] == exp_beam
# load beam from beam object
exp_beam = Beam(3.0 * u.arcsec)
header = hdu.header.copy()
del header["BMAJ"], header["BMIN"], header["BPA"]
new_proj = Projection(hdu.data, wcs=proj.wcs, header=header,
beam=exp_beam)
assert new_proj.beam == exp_beam
assert new_proj.meta['beam'] == exp_beam
# Slice the projection with a beam and check it's still there
assert new_proj[:1, :1].beam == exp_beam
def test_ondespectrum_with_beam():
exp_beam = Beam(1.0 * u.arcsec)
test_wcs_1 = WCS(naxis=1)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
# load beam from meta
meta = {"beam": exp_beam}
new_spec = OneDSpectrum(spec.data, wcs=spec.wcs, meta=meta)
assert new_spec.beam == exp_beam
assert new_spec.meta['beam'] == exp_beam
# load beam from given header
hdu = spec.hdu
exp_beam = Beam(2.0 * u.arcsec)
header = hdu.header.copy()
header = exp_beam.attach_to_header(header)
new_spec = OneDSpectrum(hdu.data, wcs=spec.wcs, header=header,
read_beam=True)
assert new_spec.beam == exp_beam
assert new_spec.meta['beam'] == exp_beam
# load beam from beam object
exp_beam = Beam(3.0 * u.arcsec)
header = hdu.header.copy()
new_spec = OneDSpectrum(hdu.data, wcs=spec.wcs, header=header,
beam=exp_beam)
assert new_spec.beam == exp_beam
assert new_spec.meta['beam'] == exp_beam
# Slice the spectrum with a beam and check it's still there
assert new_spec[:1].beam == exp_beam
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_ldo_attach_beam(LDO, data):
exp_beam = Beam(1.0 * u.arcsec)
newbeam = Beam(2.0 * u.arcsec)
p = LDO(data, copy=False, beam=exp_beam)
new_p = p.with_beam(newbeam)
assert p.beam == exp_beam
assert p.meta['beam'] == exp_beam
assert new_p.beam == newbeam
assert new_p.meta['beam'] == newbeam
@pytest.mark.xfail(raises=BeamUnitsError, strict=True)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs, data_twelve))
def test_ldo_attach_beam_jybm_error(LDO, data):
exp_beam = Beam(1.0 * u.arcsec)
newbeam = Beam(2.0 * u.arcsec)
data = data.value * u.Jy / u.beam
p = LDO(data, copy=False, beam=exp_beam)
# Attaching with no beam should work.
new_p = p.with_beam(newbeam)
# Trying to change the beam should now raise a BeamUnitsError
new_p = new_p.with_beam(newbeam)
@pytest.mark.parametrize(('LDO', 'data'),
zip(LDOs_2d, data_two_2d))
def test_projection_from_hdu(LDO, data):
p = LDO(data, copy=False)
hdu = p.hdu
p_new = LDO.from_hdu(hdu)
assert (p == p_new).all()
def test_projection_subimage(data_55):
proj, hdu = load_projection(data_55)
proj1 = proj.subimage(xlo=1, xhi=3)
proj2 = proj.subimage(xlo=24.06269 * u.deg,
xhi=24.06206 * u.deg)
proj3 = proj.subimage(xlo=24.06269*u.deg, xhi=3)
proj4 = proj.subimage(xlo=1, xhi=24.06206*u.deg)
assert proj1.shape == (5, 2)
assert proj2.shape == (5, 2)
assert proj3.shape == (5, 2)
assert proj4.shape == (5, 2)
assert proj1.wcs.wcs.compare(proj2.wcs.wcs)
assert proj1.wcs.wcs.compare(proj3.wcs.wcs)
assert proj1.wcs.wcs.compare(proj4.wcs.wcs)
assert proj.beam == proj1.beam
assert proj.beam == proj2.beam
proj4 = proj.subimage(ylo=1, yhi=3)
proj5 = proj.subimage(ylo=29.93464 * u.deg,
yhi=29.93522 * u.deg)
proj6 = proj.subimage(ylo=1, yhi=29.93522 * u.deg)
proj7 = proj.subimage(ylo=29.93464 * u.deg, yhi=3)
assert proj4.shape == (2, 5)
assert proj5.shape == (2, 5)
assert proj6.shape == (2, 5)
assert proj7.shape == (2, 5)
assert proj4.wcs.wcs.compare(proj5.wcs.wcs)
assert proj4.wcs.wcs.compare(proj6.wcs.wcs)
assert proj4.wcs.wcs.compare(proj7.wcs.wcs)
# Test mixed slicing in both spatial directions
proj1xy = proj.subimage(xlo=1, xhi=3, ylo=1, yhi=3)
proj2xy = proj.subimage(xlo=24.06269*u.deg, xhi=3,
ylo=1,yhi=29.93522 * u.deg)
proj3xy = proj.subimage(xlo=1, xhi=24.06206*u.deg,
ylo=29.93464 * u.deg, yhi=3)
assert proj1xy.shape == (2, 2)
assert proj2xy.shape == (2, 2)
assert proj3xy.shape == (2, 2)
assert proj1xy.wcs.wcs.compare(proj2xy.wcs.wcs)
assert proj1xy.wcs.wcs.compare(proj3xy.wcs.wcs)
proj5 = proj.subimage()
assert proj5.shape == proj.shape
assert proj5.wcs.wcs.compare(proj.wcs.wcs)
assert np.all(proj5.value == proj.value)
def test_projection_subimage_nocelestial_fail(data_255_delta, use_dask):
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
proj = cube.moment0(axis=1)
with pytest.raises(WCSCelestialError,
match="WCS does not contain two spatial axes."):
proj.subimage(xlo=1, xhi=3)
@pytest.mark.parametrize('LDO', LDOs_2d)
def test_twod_input_mask_type(LDO):
test_wcs = WCS(naxis=2)
test_wcs.wcs.cunit = ["deg", "deg"]
test_wcs.wcs.ctype = ["RA---SIN", 'DEC--SIN']
np_mask = np.ones(twelve_qty_2d.shape, dtype=bool)
np_mask[1] = False
bool_mask = BooleanArrayMask(np_mask, wcs=test_wcs,
shape=np_mask.shape)
# numpy array
p = LDO(twelve_qty_2d, wcs=test_wcs,
mask=np_mask)
assert (p.mask.include() == bool_mask.include()).all()
# MaskBase
p = LDO(twelve_qty_2d, wcs=test_wcs,
mask=bool_mask)
assert (p.mask.include() == bool_mask.include()).all()
# No mask
ones_mask = BooleanArrayMask(np.ones(twelve_qty_2d.shape, dtype=bool),
wcs=test_wcs, shape=np_mask.shape)
p = LDO(twelve_qty_2d, wcs=test_wcs,
mask=None)
assert (p.mask.include() == ones_mask.include()).all()
@pytest.mark.xfail
def test_mask_convolve():
# Numpy is fundamentally incompatible with the objects we have created.
# np.ma.is_masked(array) checks specifically for the array's _mask
# attribute. We would have to refactor deeply to correct this, and I
# really don't want to do that because 'None' is a much more reasonable
# and less dangerous default for a mask.
test_wcs_1 = WCS(naxis=1)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
assert spec.mask is False
from astropy.convolution import convolve,Box1DKernel
convolve(spec, Box1DKernel(3))
def test_convolve():
test_wcs_1 = WCS(naxis=1)
spec = OneDSpectrum(twelve_qty_1d, wcs=test_wcs_1)
from astropy.convolution import Box1DKernel
specsmooth = spec.spectral_smooth(Box1DKernel(1))
np.testing.assert_allclose(spec, specsmooth)
def test_spectral_interpolate():
test_wcs_1 = WCS(naxis=1)
test_wcs_1.wcs.cunit[0] = 'GHz'
spec = OneDSpectrum(np.arange(12)*u.Jy, wcs=test_wcs_1)
new_xaxis = test_wcs_1.wcs_pix2world(np.linspace(0,11,23), 0)[0] * u.Unit(test_wcs_1.wcs.cunit[0])
new_spec = spec.spectral_interpolate(new_xaxis)
np.testing.assert_allclose(new_spec, np.linspace(0,11,23)*u.Jy)
def test_spectral_interpolate_with_mask(data_522_delta, use_dask):
hdu = fits.open(data_522_delta)[0]
# Swap the velocity axis so indiff < 0 in spectral_interpolate
hdu.header["CDELT3"] = - hdu.header["CDELT3"]
cube = SpectralCube.read(hdu, use_dask=use_dask)
mask = np.ones(cube.shape, dtype=bool)
mask[:2] = False
masked_cube = cube.with_mask(mask)
spec = masked_cube[:, 0, 0]
# midpoint between each position
sg = (spec.spectral_axis[1:] + spec.spectral_axis[:-1])/2.
result = spec.spectral_interpolate(spectral_grid=sg[::-1])
# The output makes CDELT3 > 0 (reversed spectral axis) so the masked
# portion is the final 2 channels.
np.testing.assert_almost_equal(result.filled_data[:].value,
[0.0, 0.5, np.NaN, np.NaN])
def test_spectral_interpolate_reversed(data_522_delta, use_dask):
cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
# Reverse spectral axis
sg = cube.spectral_axis[::-1]
spec = cube[:, 0, 0]
result = spec.spectral_interpolate(spectral_grid=sg)
np.testing.assert_almost_equal(sg.value, result.spectral_axis.value)
def test_spectral_interpolate_with_fillvalue(data_522_delta, use_dask):
cube, data = cube_and_raw(data_522_delta, use_dask=use_dask)
# Step one channel out of bounds.
sg = ((cube.spectral_axis[0]) -
(cube.spectral_axis[1] - cube.spectral_axis[0]) *
np.linspace(1,4,4))
spec = cube[:, 0, 0]
result = spec.spectral_interpolate(spectral_grid=sg,
fill_value=42)
np.testing.assert_almost_equal(result.value,
np.ones(4)*42)
def test_spectral_units(data_255_delta, use_dask):
# regression test for issue 391
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
sp = cube[:,0,0]
assert sp.spectral_axis.unit == u.km/u.s
assert sp.header['CUNIT1'] == 'km s-1'
sp = cube.with_spectral_unit(u.m/u.s)[:,0,0]
assert sp.spectral_axis.unit == u.m/u.s
assert sp.header['CUNIT1'] in ('m s-1', 'm/s')
def test_repr_1d(data_255_delta, use_dask):
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
sp = cube[:,0,0]
print(sp)
print(sp[1:-1])
assert 'OneDSpectrum' in sp.__repr__()
assert 'OneDSpectrum' in sp[1:-1].__repr__()
def test_1d_slices(data_255_delta, use_dask):
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
sp = cube[:,0,0]
assert sp.max() == cube.max(axis=0)[0,0]
assert not isinstance(sp.max(), OneDSpectrum)
sp = cube[:-1,0,0]
assert sp.max() == cube[:-1,:,:].max(axis=0)[0,0]
assert not isinstance(sp.max(), OneDSpectrum)
# TODO: Unpin when Numpy bug is resolved.
@pytest.mark.skipif(not NUMPY_LT_1_22 and sys.platform == 'win32',
reason='https://github.com/numpy/numpy/issues/20699')
@pytest.mark.parametrize('method',
('min', 'max', 'std', 'mean', 'sum', 'cumsum',
'nansum', 'ptp', 'var'),
)
def test_1d_slice_reductions(method, data_255_delta, use_dask):
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
sp = cube[:,0,0]
if hasattr(cube, method):
spmethod = getattr(sp, method)
cubemethod = getattr(cube, method)
assert spmethod() == cubemethod(axis=0)[0,0]
else:
method = getattr(sp, method)
result = method()
assert hasattr(sp, '_fill_value')
assert 'OneDSpectrum' in sp.__repr__()
assert 'OneDSpectrum' in sp[1:-1].__repr__()
def test_1d_slice_round(data_255_delta, use_dask):
cube, data = cube_and_raw(data_255_delta, use_dask=use_dask)
sp = cube[:,0,0]
assert all(sp.value.round() == sp.round().value)
assert hasattr(sp, '_fill_value')
assert hasattr(sp.round(), '_fill_value')
assert 'OneDSpectrum' in sp.round().__repr__()
assert 'OneDSpectrum' in sp[1:-1].round().__repr__()
def test_LDO_arithmetic(data_vda, use_dask):
cube, data = cube_and_raw(data_vda, use_dask=use_dask)
sp = cube[:,0,0]
spx2 = sp * 2
assert np.all(spx2.value == sp.value*2)
assert np.all(spx2.filled_data[:].value == sp.value*2)
def test_beam_jtok_2D(data_advs, use_dask):
cube, data = cube_and_raw(data_advs, use_dask=use_dask)
cube._meta['BUNIT'] = 'Jy / beam'
cube._unit = u.Jy / u.beam
plane = cube[0]
freq = cube.with_spectral_unit(u.GHz).spectral_axis[0]
equiv = plane.beam.jtok_equiv(freq)
jtok = plane.beam.jtok(freq)
Kplane = plane.to(u.K, equivalencies=equiv, freq=freq)
np.testing.assert_almost_equal(Kplane.value,
(plane.value * jtok).value)
# test that the beam equivalencies are correctly automatically defined
Kplane = plane.to(u.K, freq=freq)
np.testing.assert_almost_equal(Kplane.value,
(plane.value * jtok).value)
bunits_list = [u.Jy / u.beam, u.K, u.Jy / u.sr, u.Jy / u.pix, u.Jy / u.arcsec**2,
u.mJy / u.beam, u.mK]
@pytest.mark.parametrize(('init_unit'), bunits_list)
def test_unit_conversions_general_2D(data_advs, use_dask, init_unit):
cube, data = cube_and_raw(data_advs, use_dask=use_dask)
cube._meta['BUNIT'] = init_unit.to_string()
cube._unit = init_unit
plane = cube[0]
# Check all unit conversion combos:
for targ_unit in bunits_list:
newplane = plane.to(targ_unit)
if init_unit == targ_unit:
np.testing.assert_almost_equal(newplane.value,
plane.value)
else:
roundtrip_plane = newplane.to(init_unit)
np.testing.assert_almost_equal(roundtrip_plane.value,
plane.value)
# TODO: Our 1D objects do NOT retain the spatial info that is needed for other BUNIT conversions
# e.g., Jy/sr, Jy/pix. So we're limited to Jy/beam -> K conversion for now
# See: https://github.com/radio-astro-tools/spectral-cube/pull/395
bunits_list_1D = [u.Jy / u.beam, u.K,
u.mJy / u.beam, u.mK]
@pytest.mark.parametrize(('init_unit'), bunits_list_1D)
def test_unit_conversions_general_1D(data_advs, use_dask, init_unit):
cube, data = cube_and_raw(data_advs, use_dask=use_dask)
cube._meta['BUNIT'] = init_unit.to_string()
cube._unit = init_unit
spec = cube[:, 0, 0]
# Check all unit conversion combos:
for targ_unit in bunits_list_1D:
newspec = spec.to(targ_unit)
if init_unit == targ_unit:
np.testing.assert_almost_equal(newspec.value,
spec.value)
else:
roundtrip_spec = newspec.to(init_unit)
np.testing.assert_almost_equal(roundtrip_spec.value,
spec.value)
@pytest.mark.parametrize(('init_unit'), bunits_list_1D)
def test_multibeams_unit_conversions_general_1D(data_vda_beams, use_dask, init_unit):
cube, data = cube_and_raw(data_vda_beams, use_dask=use_dask)
cube._meta['BUNIT'] = init_unit.to_string()
cube._unit = init_unit
spec = cube[:, 0, 0]
# Check all unit conversion combos:
for targ_unit in bunits_list_1D:
newspec = spec.to(targ_unit)
if init_unit == targ_unit:
np.testing.assert_almost_equal(newspec.value,
spec.value)
else:<|fim▁hole|>
def test_basic_arrayness(data_adv, use_dask):
cube, data = cube_and_raw(data_adv, use_dask=use_dask)
assert cube.shape == data.shape
spec = cube[:,0,0]
assert np.all(np.asanyarray(spec).value == data[:,0,0])
assert np.all(np.array(spec) == data[:,0,0])
assert np.all(np.asarray(spec) == data[:,0,0])
# These are commented out because it is presently not possible to convert
# projections to masked arrays
# assert np.all(np.ma.asanyarray(spec).value == data[:,0,0])
# assert np.all(np.ma.asarray(spec) == data[:,0,0])
# assert np.all(np.ma.array(spec) == data[:,0,0])
slc = cube[0,:,:]
assert np.all(np.asanyarray(slc).value == data[0,:,:])
assert np.all(np.array(slc) == data[0,:,:])
assert np.all(np.asarray(slc) == data[0,:,:])
# assert np.all(np.ma.asanyarray(slc).value == data[0,:,:])
# assert np.all(np.ma.asarray(slc) == data[0,:,:])
# assert np.all(np.ma.array(slc) == data[0,:,:])
def test_spatial_world_extrema_2D(data_522_delta, use_dask):
hdu = fits.open(data_522_delta)[0]
cube = SpectralCube.read(hdu, use_dask=use_dask)
plane = cube[0]
assert (cube.world_extrema == plane.world_extrema).all()
assert (cube.longitude_extrema == plane.longitude_extrema).all()
assert (cube.latitude_extrema == plane.latitude_extrema).all()
@pytest.mark.parametrize('view', (np.s_[:, :],
np.s_[::2, :],
np.s_[0]))
def test_spatial_world(view, data_adv, use_dask):
p = path(data_adv)
# d = fits.getdata(p)
# wcs = WCS(p)
# c = SpectralCube(d, wcs)
c = SpectralCube.read(p, use_dask=use_dask)
plane = c[0]
wcs = plane.wcs
shp = plane.shape
inds = np.indices(plane.shape)
pix = np.column_stack([i.ravel() for i in inds[::-1]])
world = wcs.all_pix2world(pix, 0).T
world = [w.reshape(shp) for w in world]
world = [w[view] * u.Unit(wcs.wcs.cunit[i])
for i, w in enumerate(world)][::-1]
w2 = plane.world[view]
for result, expected in zip(w2, world):
assert_allclose(result, expected)
# Test world_flattened here, too
# TODO: Enable once 2D masking is a thing
w2_flat = plane.flattened_world(view=view)
for result, expected in zip(w2_flat, world):
print(result.shape, expected.flatten().shape)
assert_allclose(result, expected.flatten())<|fim▁end|>
|
roundtrip_spec = newspec.to(init_unit)
np.testing.assert_almost_equal(roundtrip_spec.value,
spec.value)
|
<|file_name|>utils.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true<|fim▁hole|> case 'm':
return { c: 'M', x: p.x + c.dx, y: p.y + c.dy };
case 'z':
return { c: 'Z' };
case 'l':
return { c: 'L', x: p.x + c.dx, y: p.y + c.dy };
case 'h':
return { c: 'L', x: p.x + c.dx, y: p.y };
case 'v':
return { c: 'L', y: p.y + c.dy, x: p.x };
case 'H':
return { c: 'L', x: c.x, y: p.y };
case 'V':
return { c: 'L', y: c.y, x: p.x };
case 'c':
return { c: 'C', x1: p.x + c.dx1, y1: p.y + c.dy1, x2: p.x + c.dx2, y2: p.y + c.dy2, x: p.x + c.dx, y: p.y + c.dy };
case 's':
return { c: 'S', x2: p.x + c.dx2, y2: p.y + c.dy2, x: p.x + c.dx, y: p.y + c.dy };
case 'q':
return { c: 'Q', x1: p.x + c.dx1, y1: p.y + c.dy1, x: p.x + c.dx, y: p.y + c.dy };
case 't':
return { c: 'T', x: p.x + c.dx, y: p.y + c.dy };
case 'a':
return { c: 'A', rx: c.rx, ry: c.ry, xAxisRotation: c.xAxisRotation, largeArcFlag: c.largeArcFlag, sweepFlag: c.sweepFlag, x: p.x + c.dx, y: p.y + c.dy };
default:
return c;
}
};
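// Given the current position and the subpath's starting point, return the point reached after applying command c ('begin' is what Z/z close back to).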
var applyCommand = exports.applyCommand = function applyCommand(position, begin, c) {
var dif = { dx: 0, dy: 0 };
if (c.c === 'm' || c.c === 'l' || c.c === 'c' || c.c === 's' || c.c === 'q' || c.c === 't' || c.c === 'a') {
dif = c;
} else if (c.c === 'h') {
dif = { dx: c.dx, dy: 0 };
} else if (c.c === 'v') {
dif = { dx: 0, dy: c.dy };
} else if (c.c === 'z') {
dif = { dx: begin.x - position.x, dy: begin.y - position.y };
} else if (c.c === 'V') {
return { x: position.x, y: c.y };
} else if (c.c === 'H') {
return { x: c.x, y: position.y };
} else if (c.c === 'Z') {
return begin;
} else {
return c;
}
return { x: position.x + dif.dx, y: position.y + dif.dy };
};
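// Convert every command to absolute coordinates and drop line/close commands that do not move the current point.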
var normalizeData = exports.normalizeData = function normalizeData(d) {
var begin = { x: 0, y: 0 };
var position = { x: 0, y: 0 };
var result = [];
for (var i = 0; i < d.length; i++) {
var command = d[i];
var absoluteCommand = makeCommandAbsolute(position, command);
var newPosition = applyCommand(position, begin, absoluteCommand);
// Filter out line commands which don't change the position
var isLineCommand = absoluteCommand.c === 'L' || absoluteCommand.c === 'Z';
if (!isLineCommand || !pointEquals(newPosition, position)) {
result.push(absoluteCommand);
position = newPosition;
}
if (absoluteCommand.c === 'M') {
begin = absoluteCommand;
} else if (absoluteCommand.c === 'm') {
begin = applyCommand(position, begin, absoluteCommand);
}
}
return result;
};
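// Split a normalized command list into sub-paths; each sub-path starts from the most recent 'M' command.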
var getSubPaths = exports.getSubPaths = function getSubPaths(d) {
if (d.length === 0) {
return [];
} else if (d[0].c !== 'M' && d[0].c !== 'm') {
throw new Error('Path must start with an "M" or "m" command, not "' + d[0].c + '" ');
}
var result = [];
var nextSubPath = [];
var lastM = { c: 'M', x: 0, y: 0 };
d.forEach(function (command) {
if (command.c === 'M') {
if (nextSubPath.length > 0) {
result.push(nextSubPath);
}
nextSubPath = [command];
lastM = command;
} else if (command.c === 'Z') {
nextSubPath.push(command);
result.push(nextSubPath);
nextSubPath = [];
} else {
if (nextSubPath.length === 0) {
nextSubPath.push(lastM);
}
nextSubPath.push(command);
}
});
if (nextSubPath.length > 0) {
result.push(nextSubPath);
}
return result;
};
var isSubPathClosed = exports.isSubPathClosed = function isSubPathClosed(begin, d) {
if (d.length < 2) {
return true;
}
var lastCommand = d[d.length - 1];
if (lastCommand.c === 'Z') {
return true;
}
return lastCommand.x === begin.x && lastCommand.y === begin.y;
};
var pointEquals = exports.pointEquals = function pointEquals(p1, p2) {
return p1.x === p2.x && p1.y === p2.y;
};<|fim▁end|>
|
});
var makeCommandAbsolute = exports.makeCommandAbsolute = function makeCommandAbsolute(p, c) {
switch (c.c) {
|
<|file_name|>cli.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
/**
*
* Copyright 2014-2019 David Herron
*
* This file is part of AkashaCMS (http://akashacms.com/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
const program = require('commander');
const ghpages = require('gh-pages');
const fs = require('fs');
const fsp = require('fs/promises');
const path = require('path');
const util = require('util');
const filez = require('./filez');
const data = require('./data');
// Note this is an ES6 module and to use it we must
// use an async function along with the await keyword
const _filecache = import('./cache/file-cache.mjs');
const _watchman = import('./cache/watchman.mjs');
process.title = 'akasharender';
program.version('0.7.5');
program
.command('copy-assets <configFN>')
.description('Copy assets into output directory')
.action(async (configFN) => {<|fim▁hole|> await akasha.cacheSetup(config);
await akasha.setupAssets(config);
let filecache = await _filecache;
await filecache.assets.isReady();
await config.copyAssets();
await akasha.closeCaches();
} catch (e) {
console.error(`copy-assets command ERRORED ${e.stack}`);
}
});
program
.command('document <configFN> <documentFN>')
.description('Show information about a document')
.action(async (configFN, documentFN) => {
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
const filecache = await _filecache;
const documents = filecache.documents;
await documents.isReady();
const doc = await documents.find(documentFN);
// data: ${doc.data}
// text: ${doc.text}
console.log(`
basedir: ${doc.basedir}
docpath: ${doc.docpath}
fspath: ${doc.fspath}
renderer: ${util.inspect(doc.renderer)}
renderpath: ${doc.renderpath}
metadata: ${util.inspect(doc.metadata)}
`);
await akasha.closeCaches();
} catch (e) {
console.error(`document command ERRORED ${e.stack}`);
}
});
program
.command('render-document <configFN> <documentFN>')
.description('Render a document into output directory')
.action(async (configFN, documentFN) => {
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetupComplete(config);
data.init();
console.log(`render-document before renderPath ${documentFN}`);
let result = await akasha.renderPath(config, documentFN);
console.log(result);
await akasha.closeCaches();
} catch (e) {
console.error(`render-document command ERRORED ${e.stack}`);
}
});
program
.command('render <configFN>')
.description('Render a site into output directory')
.option('--quiet', 'Do not print the rendering report')
.option('--results-to <resultFile>', 'Store the results into the named file')
.option('--perfresults <perfResultsFile>', 'Store the time to render each document')
.action(async (configFN, cmdObj) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetupComplete(config);
data.init();
let results = await akasha.render(config);
if (!cmdObj.quiet) {
for (let result of results) {
// TODO --- if AKASHARENDER_TRACE_RENDER then output tracing data
// TODO --- also set process.env.GLOBFS_TRACE=1
if (result.error) {
console.error(result.error);
} else {
console.log(result.result);
// console.log(util.inspect(result.result));
}
}
}
if (cmdObj.resultsTo) {
const output = fs.createWriteStream(cmdObj.resultsTo);
for (let result of results) {
if (result.error) {
output.write('****ERROR '+ result.error + '\n');
} else {
output.write(result.result + '\n');
// console.log(util.inspect(result.result));
}
}
output.close();
}
if (cmdObj.perfresults) {
const output = fs.createWriteStream(cmdObj.perfresults);
for (let result of results) {
if (result.error) {
// Ignore
} else if (result.result.startsWith('COPY')) {
// Ignore
} else {
let results = result.result.split('\n');
let perf = results[0];
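// The first line of a render result is expected to look like "<source> ==> <output> (N.NN seconds)"; pull out the output path and total render time.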
let matches = perf.match(/.* ==> (.*) \(([0-9\.]+) seconds\)$/);
if (!matches) continue;
if (matches.length < 3) continue;
let fn = matches[1];
let time = matches[2];
let report = `${time} ${fn}`;
for (let i = 1; i < results.length; i++) {
let stages = results[i].match(/(FRONTMATTER|FIRST RENDER|SECOND RENDER|MAHABHUTA|RENDERED) ([0-9\.]+) seconds$/);
if (!stages || stages.length < 3) continue;
report += ` ${stages[1]} ${stages[2]}`;
}
output.write(`${report}\n`);
}
}
output.close();
}
await akasha.closeCaches();
} catch (e) {
console.error(`render command ERRORED ${e.stack}`);
}
});
program
.command('explain <configFN>')
.description('Explain a cache query')
.action(async (configFN) => {
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
let filecache = await _filecache;
await filecache.documents.isReady();
data.init();
let explanation = filecache.documents
.getCollection(filecache.documents.collection)
.explain(
/* {
$or: [
{ vpath: { $eeq: "archive/java.net/2005/08/findbugs.html.md" } },
{ renderPath: { $eeq: "archive/java.net/2005/08/findbugs.html" } }
]
} */
{
renderPath: /\.html$/,
vpath: /^blog\/2019\//,
docMetadata: {
layout: {
$in: [
"blog.html.ejs",
"blog.html.njk",
"blog.html.handlebars"
]
},
blogtag: { $eeq: "news" }
}
}
);
// console.log(JSON.stringify(explanation, undefined, ' '));
console.log(`EXPLAINING ${explanation.operation} results: ${explanation.results}`);
console.log('Analysis ', explanation.analysis);
console.log('Analysis - query ', explanation.analysis.query);
console.log('Steps ', explanation.steps);
console.log('Time ', explanation.time);
console.log('Index ', explanation.index);
console.log('Log ', explanation.log);
await akasha.closeCaches();
} catch (e) {
console.error(`explain command ERRORED ${e.stack}`);
}
});
program
.command('watch <configFN>')
.description('Track changes to files in a site, and rebuild anything that changes')
.action(async (configFN, cmdObj) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetupComplete(config);
data.init();
// console.log('CALLING config.hookBeforeSiteRendered');
await config.hookBeforeSiteRendered();
const watchman = (await _watchman).watchman;
await watchman(config);
// await akasha.closeCaches();
} catch (e) {
console.error(`watch command ERRORED ${e.stack}`);
}
});
program
.command('gh-pages-publish <configFN>')
.description('Publish a site using Github Pages. Takes the rendering destination, adds it into a branch, and pushes that to Github')
.option('-b, --branch <branchName>', 'The branch to use for publishing to Github')
.option('-r, --repo <repoURL>', 'The repository URL to use if it must differ from the URL of the local directory')
.option('--remote <remoteName>', 'The Git remote name to use if it must differ from "origin"')
.option('--tag <tag>', 'Any tag to add when pushing to Github')
.option('--message <message>', 'Any Git commit message')
.option('--username <username>', 'Github user name to use')
.option('--email <email>', 'Github user email to use')
.option('--nopush', 'Do not push to Github, only commit')
.action(async (configFN, cmdObj) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
let options = {
dotfiles: true
};
if (cmdObj.branch) {
options.branch = cmdObj.branch;
}
if (cmdObj.repo) {
options.repo = cmdObj.repo;
}
if (cmdObj.remote) {
options.remote = cmdObj.remote;
}
if (cmdObj.tag) {
options.tag = cmdObj.tag;
}
if (cmdObj.message) {
options.message = cmdObj.message;
}
if (cmdObj.username || cmdObj.email) {
options.user = {};
}
if (cmdObj.username) {
options.user.name = cmdObj.username;
}
if (cmdObj.email) {
options.user.email = cmdObj.email;
}
if (cmdObj.nopush) {
options.push = false;
}
// console.log(`gh-pages-publish options ${config.renderDestination} cmdObj ${util.inspect(cmdObj)} options ${util.inspect(options)}`);
ghpages.publish(config.renderDestination, options, function(err) {
if (err) console.error(err);
else console.log('OK');
});
} catch (e) {
console.error(`gh-pages-publish command ERRORED ${e.stack}`);
}
});
program
.command('config <configFN>')
.description('Print a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
console.log(config);
} catch (e) {
console.error(`config command ERRORED ${e.stack}`);
}
});
program
.command('docdirs <configFN>')
.description('List the documents directories in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
console.log(config.documentDirs);
} catch (e) {
console.error(`docdirs command ERRORED ${e.stack}`);
}
});
program
.command('assetdirs <configFN>')
.description('List the assets directories in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
console.log(config.assetDirs);
} catch (e) {
console.error(`assetdirs command ERRORED ${e.stack}`);
}
});
program
.command('partialdirs <configFN>')
.description('List the partials directories in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
console.log(config.partialsDirs);
} catch (e) {
console.error(`partialdirs command ERRORED ${e.stack}`);
}
});
program
.command('layoutsdirs <configFN>')
.description('List the layouts directories in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
console.log(config.layoutDirs);
} catch (e) {
console.error(`layoutsdirs command ERRORED ${e.stack}`);
}
});
program
.command('documents <configFN>')
.description('List the documents in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.documents.isReady();
console.log(filecache.documents.paths());
await akasha.closeCaches();
} catch (e) {
console.error(`documents command ERRORED ${e.stack}`);
}
});
program
.command('docinfo <configFN> <docFN>')
.description('Show information about a document in a site configuration')
.action(async (configFN, docFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.documents.isReady();
console.log(`docFN ${docFN} `, filecache.documents.find(docFN));
await akasha.closeCaches();
} catch (e) {
console.error(`docinfo command ERRORED ${e.stack}`);
}
});
program
.command('tags <configFN>')
.description('List the tags')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.documents.isReady();
console.log(filecache.documents.tags());
await akasha.closeCaches();
} catch (e) {
console.error(`tags command ERRORED ${e.stack}`);
}
});
program
.command('search <configFN>')
.description('Search for documents')
.option('--root <rootPath>', 'Select only files within the named directory')
.option('--match <pathmatch>', 'Select only files matching the regular expression')
.option('--glob <globmatch>', 'Select only files matching the glob expression')
.option('--renderglob <globmatch>', 'Select only files rendering to the glob expression')
.option('--layout <layout>', 'Select only files matching the layouts')
.option('--mime <mime>', 'Select only files matching the MIME type')
.option('--tag <tag>', 'Select only files with the tag')
.action(async (configFN, cmdObj) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupDocuments(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.documents.isReady();
// console.log(cmdObj);
let options = { };
if (cmdObj.root) options.rootPath = cmdObj.root;
if (cmdObj.match) options.pathmatch = cmdObj.match;
if (cmdObj.glob) options.glob = cmdObj.glob;
if (cmdObj.renderglob) options.renderglob = cmdObj.renderglob;
if (cmdObj.layout) options.layouts = cmdObj.layout;
if (cmdObj.mime) options.mime = cmdObj.mime;
if (cmdObj.tag) options.tag = cmdObj.tag;
// console.log(options);
let docs = filecache.documents.search(config, options);
console.log(docs);
await akasha.closeCaches();
} catch (e) {
console.error(`search command ERRORED ${e.stack}`);
}
});
program
.command('assets <configFN>')
.description('List the assets in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupAssets(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.assets.isReady();
console.log(filecache.assets.paths());
await akasha.closeCaches();
} catch (e) {
console.error(`assets command ERRORED ${e.stack}`);
}
});
program
.command('assetinfo <configFN> <docFN>')
.description('Show information about an asset in a site configuration')
.action(async (configFN, assetFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupAssets(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.assets.isReady();
console.log(filecache.assets.find(assetFN));
await akasha.closeCaches();
} catch (e) {
console.error(`assetinfo command ERRORED ${e.stack}`);
}
});
program
.command('layouts <configFN>')
.description('List the layouts in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupLayouts(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.layouts.isReady();
console.log(filecache.layouts.paths());
await akasha.closeCaches();
} catch (e) {
console.error(`layouts command ERRORED ${e.stack}`);
}
});
// TODO both test.html and test.html.njk match
// This is probably incorrect, since we do not render these files
// The partials directory has the same problem
// Some kind of flag on creating the FileCache to change the behavior
program
.command('layoutinfo <configFN> <docFN>')
.description('Show information about a layout in a site configuration')
.action(async (configFN, layoutFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupLayouts(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.layouts.isReady();
console.log(filecache.layouts.find(layoutFN));
await akasha.closeCaches();
} catch (e) {
console.error(`layoutinfo command ERRORED ${e.stack}`);
}
});
program
.command('partials <configFN>')
.description('List the partials in a site configuration')
.action(async (configFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
await akasha.cacheSetup(config);
await akasha.setupPartials(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.partials.isReady();
await akasha.setupPluginCaches(config);
console.log(filecache.partials.paths());
await akasha.closeCaches();
} catch (e) {
console.error(`partials command ERRORED ${e.stack}`);
}
});
// TODO both test.html and test.html.njk match
// This is probably incorrect, since we do not render these files
program
.command('partialinfo <configFN> <docFN>')
.description('Show information about a partial in a site configuration')
.action(async (configFN, partialFN) => {
// console.log(`render: akasha: ${util.inspect(akasha)}`);
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
// await akasha.cacheSetupComplete(config);
await akasha.cacheSetup(config);
await akasha.setupPartials(config);
let filecache = await _filecache;
// console.log(filecache.documents);
await filecache.partials.isReady();
console.log(filecache.partials.find(partialFN));
await akasha.closeCaches();
} catch (e) {
console.error(`partialinfo command ERRORED ${e.stack}`);
}
});
program.parse(process.argv);<|fim▁end|>
|
try {
const config = require(path.join(process.cwd(), configFN));
let akasha = config.akasha;
|
<|file_name|>reportService.js<|end_file_name|><|fim▁begin|>(function () {
'use strict';
app.service('reportService', reportService);
function reportService($http, $window, $interval, timeAgo, restCall, queryService, dashboardFactory, ngAuthSettings, reportServiceUrl) {
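// Issue a GET to the given report URL, then fill in the report's data, totals and status.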
var populateReport = function (report, url) {
function successCallback(response) {
if (response.data.data.length === 0) {
report.status = 'EMPTY'
} else {
report.status = 'SUCCESS';
report.data = response.data.data;
report.getTotal();
console.log(report);
}
}
function errorCallback(error) {
console.log(error);
report.status = 'FAILED';
}
restCall('GET', url, null, successCallback, errorCallback)
}
var getReport = function () {
return {
data: {},
searchParam : {
startdate: null,
enddate: null,
type: "ENTERPRISE",
generateexcel: false,
userid: null,
username: null
},
total : {
totalVendor: 0,
totalDelivery: 0,<|fim▁hole|> totalCompleted: 0,
totalCancelled: 0,
totalProductPrice: 0,
totalDeliveryCharge: 0
},
getTotal: function () {
var itSelf = this;
itSelf.total.totalVendor = 0;
itSelf.total.totalDelivery = 0;
itSelf.total.totalPending = 0;
itSelf.total.totalInProgress = 0;
itSelf.total.totalCompleted = 0;
itSelf.total.totalCancelled = 0;
itSelf.total.totalProductPrice = 0;
itSelf.total.totalDeliveryCharge = 0;
angular.forEach(this.data, function (value, key) {
itSelf.total.totalVendor += 1;
itSelf.total.totalDelivery += value.TotalDelivery;
itSelf.total.totalPending += value.TotalPending;
itSelf.total.totalInProgress += value.TotalInProgress;
itSelf.total.totalCompleted += value.TotalCompleted;
itSelf.total.totalCancelled += value.TotalCancelled;
itSelf.total.totalProductPrice += value.TotalProductPrice;
itSelf.total.totalDeliveryCharge += value.TotalDeliveryCharge;
});
console.log(this.total)
},
getUrl: function () {
// FIXME: need to be refactored
if (this.searchParam.type === "BIKE_MESSENGER") {
return reportServiceUrl + "api/report?startdate="+this.searchParam.startdate+"&enddate="+this.searchParam.enddate+"&usertype="+this.searchParam.type;
}
return reportServiceUrl + "api/report?startdate="+this.searchParam.startdate+"&enddate="+this.searchParam.enddate+"&usertype="+this.searchParam.type;
},
getReport: function () {
var reportUrl = this.getUrl();
this.status = 'IN_PROGRESS';
populateReport(this, reportUrl);
},
goToReportJobs : function (user) {
console.log(user)
console.log(this.data)
if (this.searchParam.type === "BIKE_MESSENGER") {
$window.open("#/report/jobs?" + "startdate=" + this.searchParam.startdate + "&enddate="+ this.searchParam.enddate +
"&usertype=BIKE_MESSENGER" + "&userid=" + this.data[user].UserId, '_blank');
} else {
$window.open("#/report/jobs?" + "startdate=" + this.searchParam.startdate + "&enddate="+ this.searchParam.enddate + "&usertype=" + this.searchParam.type + "&username=" + user, '_blank');
}
},
exportExcel : function () {
var excelReportUrl = reportServiceUrl + "api/report?startdate="+this.searchParam.startdate+"&enddate="+this.searchParam.enddate+"&usertype="+this.searchParam.type + "&generateexcel=true";
$window.open(excelReportUrl, '_blank');
},
status : 'NONE'
}
}
return {
getReport: getReport
}
}
})();<|fim▁end|>
|
totalPending: 0,
totalInProgress: 0,
|
<|file_name|>spinner.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (https://github.com/gtk-rs/gir)
// from gir-files (https://github.com/gtk-rs/gir-files)
// DO NOT EDIT
use Buildable;
use Widget;
use ffi;
use glib;
use glib::StaticType;
use glib::Value;
use glib::object::Downcast;
use glib::object::IsA;
use glib::signal::SignalHandlerId;
use glib::signal::connect;
use glib::translate::*;
use glib_ffi;
use gobject_ffi;
use std::boxed::Box as Box_;
use std::mem;
use std::mem::transmute;
use std::ptr;
glib_wrapper! {
pub struct Spinner(Object<ffi::GtkSpinner, ffi::GtkSpinnerClass>): Widget, Buildable;
match fn {
get_type => || ffi::gtk_spinner_get_type(),
}
}
impl Spinner {
pub fn new() -> Spinner {
assert_initialized_main_thread!();
unsafe {
Widget::from_glib_none(ffi::gtk_spinner_new()).downcast_unchecked()
}
}
}
impl Default for Spinner {
fn default() -> Self {
Self::new()
}
}
pub trait SpinnerExt {<|fim▁hole|> fn stop(&self);
fn get_property_active(&self) -> bool;
fn set_property_active(&self, active: bool);
fn connect_property_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId;
}
impl<O: IsA<Spinner> + IsA<glib::object::Object>> SpinnerExt for O {
fn start(&self) {
unsafe {
ffi::gtk_spinner_start(self.to_glib_none().0);
}
}
fn stop(&self) {
unsafe {
ffi::gtk_spinner_stop(self.to_glib_none().0);
}
}
fn get_property_active(&self) -> bool {
unsafe {
let mut value = Value::from_type(<bool as StaticType>::static_type());
gobject_ffi::g_object_get_property(self.to_glib_none().0, "active".to_glib_none().0, value.to_glib_none_mut().0);
value.get().unwrap()
}
}
fn set_property_active(&self, active: bool) {
unsafe {
gobject_ffi::g_object_set_property(self.to_glib_none().0, "active".to_glib_none().0, Value::from(&active).to_glib_none().0);
}
}
fn connect_property_active_notify<F: Fn(&Self) + 'static>(&self, f: F) -> SignalHandlerId {
unsafe {
let f: Box_<Box_<Fn(&Self) + 'static>> = Box_::new(Box_::new(f));
connect(self.to_glib_none().0, "notify::active",
transmute(notify_active_trampoline::<Self> as usize), Box_::into_raw(f) as *mut _)
}
}
}
unsafe extern "C" fn notify_active_trampoline<P>(this: *mut ffi::GtkSpinner, _param_spec: glib_ffi::gpointer, f: glib_ffi::gpointer)
where P: IsA<Spinner> {
let f: &&(Fn(&P) + 'static) = transmute(f);
f(&Spinner::from_glib_borrow(this).downcast_unchecked())
}<|fim▁end|>
|
fn start(&self);
|
<|file_name|>Buffer.cpp<|end_file_name|><|fim▁begin|>/*
Buffer.cpp - This file is part of Element
Copyright (C) 2014 Kushview, LLC. All rights reserved.
*/
<|fim▁hole|>#if ELEMENT_BUFFER_FACTORY
Buffer::Buffer (DataType dataType_, uint32 subType_)
: factory (nullptr),
refs (0),
dataType (dataType_),
subType (subType_),
capacity (0),
next (nullptr)
{ }
Buffer::~Buffer() { }
void Buffer::attach (BufferFactory* owner)
{
jassert (factory == nullptr && owner != nullptr);
factory = owner;
}
void Buffer::recycle()
{
if (isManaged())
factory->recycle (this);
}
void intrusive_ptr_add_ref (Buffer* b)
{
if (b->isManaged())
++b->refs;
}
void intrusive_ptr_release (Buffer* b)
{
if (b->isManaged())
if (--b->refs == 0)
b->recycle();
}
#endif<|fim▁end|>
| |
<|file_name|>parse-links.service.js<|end_file_name|><|fim▁begin|>'use strict';
angular.module('donetexampleApp')
.service('ParseLinks', function () {
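// Parse an HTTP Link header of the form '<url?page=N>; rel="name", ...' into a map of rel name -> page number.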
this.parse = function (header) {
if (header.length == 0) {
throw new Error("input must not be of zero length");
}<|fim▁hole|>
// Split parts by comma
var parts = header.split(',');
var links = {};
// Parse each part into a named link
angular.forEach(parts, function (p) {
var section = p.split(';');
if (section.length != 2) {
throw new Error("section could not be split on ';'");
}
var url = section[0].replace(/<(.*)>/, '$1').trim();
var queryString = {};
url.replace(
new RegExp("([^?=&]+)(=([^&]*))?", "g"),
function($0, $1, $2, $3) { queryString[$1] = $3; }
);
var page = queryString['page'];
if( angular.isString(page) ) {
page = parseInt(page);
}
var name = section[1].replace(/rel="(.*)"/, '$1').trim();
links[name] = page;
});
return links;
}
});<|fim▁end|>
| |
<|file_name|>descriptor.go<|end_file_name|><|fim▁begin|>package lib
import (
"strings"
cvmfsUtil "github.com/cvmfs/docker-graphdriver/plugins/util"
)
// m is the manifest of the original image
// repoLocation is where inside the repo we saved the several layers
// origin is an encoding of the original references and the original registry.
// I believe origin is quite useless, but it may be better to preserve it for
// ergonomic reasons.
func MakeThinImage(m Manifest, repoLocation string, origin string) cvmfsUtil.ThinImage {
layers := make([]cvmfsUtil.ThinImageLayer, len(m.Layers))
url_base := "cvmfs://" + repoLocation
for i, l := range m.Layers {
d := strings.Split(l.Digest, ":")[1]
url := url_base + "/" + d
layers[i] = cvmfsUtil.ThinImageLayer{Digest: d, Url: url}<|fim▁hole|> }
return cvmfsUtil.ThinImage{Layers: layers,
Origin: origin,
Version: thinImageVersion}
}<|fim▁end|>
| |
<|file_name|>run.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|> Input: Templated Antimony model (stdin)
Output: Expanded Antimony model (stdout)
"""
import fileinput
import os
import sys
directory = os.path.dirname(os.path.abspath(__file__))
path = os.path.join(directory, "TemplateSB")
sys.path.append(path)
from template_processor import TemplateProcessor
template_stg = ''
for line in fileinput.input():
template_stg += "\n" + line
processor = TemplateProcessor(template_stg)
expanded_stg = processor.do()
sys.stdout.write(expanded_stg)<|fim▁end|>
|
Running the template pre-processor standalone.
|
<|file_name|>BlockNoteblock.java<|end_file_name|><|fim▁begin|>package cn.nukkit.block;
import cn.nukkit.Player;
import cn.nukkit.item.Item;
import cn.nukkit.item.ItemTool;
import cn.nukkit.level.Level;
import cn.nukkit.level.sound.NoteBoxSound;
import cn.nukkit.network.protocol.BlockEventPacket;
/**
* Created by Snake1999 on 2016/1/17.
* Package cn.nukkit.block in project nukkit.
*/
public class BlockNoteblock extends BlockSolid {
public BlockNoteblock() {
this(0);
}
public BlockNoteblock(int meta) {
super(meta);
}
@Override
public String getName() {
return "Note Block";
}
@Override
public int getId() {
return NOTEBLOCK;
}
@Override
public int getToolType() {
return ItemTool.TYPE_AXE;
}
@Override
public double getHardness() {
return 0.8D;
}
@Override
public double getResistance() {
return 4D;
}
public boolean canBeActivated() {
return true;
}
public int getStrength() {
return this.meta;
}
public void increaseStrength() {
if (this.meta < 24) {
this.meta++;
} else {
this.meta = 0;
}
}
public int getInstrument() {
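// The instrument is chosen from the type of block directly below the note block.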
Block below = this.down();
switch (below.getId()) {
case WOODEN_PLANK:
case NOTEBLOCK:
case CRAFTING_TABLE:
return NoteBoxSound.INSTRUMENT_BASS;
case SAND:
case SANDSTONE:
case SOUL_SAND:
return NoteBoxSound.INSTRUMENT_TABOUR;
case GLASS:
case GLASS_PANEL:
case GLOWSTONE_BLOCK:
return NoteBoxSound.INSTRUMENT_CLICK;
case COAL_ORE:
case DIAMOND_ORE:
case EMERALD_ORE:
case GLOWING_REDSTONE_ORE:<|fim▁hole|> return NoteBoxSound.INSTRUMENT_BASS_DRUM;
default:
return NoteBoxSound.INSTRUMENT_PIANO;
}
}
public void emitSound() {
BlockEventPacket pk = new BlockEventPacket();
pk.x = (int) this.x;
pk.y = (int) this.y;
pk.z = (int) this.z;
pk.case1 = this.getInstrument();
pk.case2 = this.getStrength();
this.getLevel().addChunkPacket((int) this.x >> 4, (int) this.z >> 4, pk);
this.getLevel().addSound(new NoteBoxSound(this, this.getInstrument(), this.getStrength()));
}
public boolean onActivate(Item item) {
return this.onActivate(item, null);
}
public boolean onActivate(Item item, Player player) {
Block up = this.up();
if (up.getId() == Block.AIR) {
this.increaseStrength();
this.emitSound();
return true;
} else {
return false;
}
}
@Override
public int onUpdate(int type) {
if (type == Level.BLOCK_UPDATE_NORMAL || type == Level.BLOCK_UPDATE_REDSTONE) {
//TODO: redstone
}
return 0;
}
}<|fim▁end|>
|
case GOLD_ORE:
case IRON_ORE:
case LAPIS_ORE:
case REDSTONE_ORE:
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
import netaddr
from neutron_lib import exceptions
from oslo_log import log as logging
from oslo_policy import policy as oslo_policy
from oslo_utils import excutils
import six
import webob.exc
from neutron._i18n import _, _LE, _LI
from neutron.api import api_common
from neutron.api.v2 import attributes
from neutron.api.v2 import resource as wsgi_resource
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron import policy
from neutron import quota
from neutron.quota import resource_registry
LOG = logging.getLogger(__name__)
FAULT_MAP = {exceptions.NotFound: webob.exc.HTTPNotFound,
exceptions.Conflict: webob.exc.HTTPConflict,
exceptions.InUse: webob.exc.HTTPConflict,
exceptions.BadRequest: webob.exc.HTTPBadRequest,
exceptions.ServiceUnavailable: webob.exc.HTTPServiceUnavailable,
exceptions.NotAuthorized: webob.exc.HTTPForbidden,
netaddr.AddrFormatError: webob.exc.HTTPBadRequest,
oslo_policy.PolicyNotAuthorized: webob.exc.HTTPForbidden
}
class Controller(object):
LIST = 'list'
SHOW = 'show'
CREATE = 'create'
UPDATE = 'update'
DELETE = 'delete'
@property
def plugin(self):
return self._plugin
@property
def resource(self):
return self._resource
@property
def attr_info(self):
return self._attr_info
@property
def member_actions(self):
return self._member_actions
def __init__(self, plugin, collection, resource, attr_info,
allow_bulk=False, member_actions=None, parent=None,
allow_pagination=False, allow_sorting=False):
if member_actions is None:
member_actions = []
self._plugin = plugin
self._collection = collection.replace('-', '_')
self._resource = resource.replace('-', '_')
self._attr_info = attr_info
self._allow_bulk = allow_bulk
self._allow_pagination = allow_pagination
self._allow_sorting = allow_sorting
self._native_bulk = self._is_native_bulk_supported()
self._native_pagination = self._is_native_pagination_supported()
self._native_sorting = self._is_native_sorting_supported()
self._policy_attrs = [name for (name, info) in self._attr_info.items()
if info.get('required_by_policy')]
self._notifier = n_rpc.get_notifier('network')
self._member_actions = member_actions
self._primary_key = self._get_primary_key()
if self._allow_pagination and self._native_pagination:
# Native pagination need native sorting support
if not self._native_sorting:
raise exceptions.Invalid(
_("Native pagination depend on native sorting")
)
if not self._allow_sorting:
LOG.info(_LI("Allow sorting is enabled because native "
"pagination requires native sorting"))
self._allow_sorting = True
self.parent = parent
if parent:
self._parent_id_name = '%s_id' % parent['member_name']
parent_part = '_%s' % parent['member_name']
else:
self._parent_id_name = None
parent_part = ''
self._plugin_handlers = {
self.LIST: 'get%s_%s' % (parent_part, self._collection),
self.SHOW: 'get%s_%s' % (parent_part, self._resource)
}
for action in [self.CREATE, self.UPDATE, self.DELETE]:
self._plugin_handlers[action] = '%s%s_%s' % (action, parent_part,
self._resource)
def _get_primary_key(self, default_primary_key='id'):
for key, value in six.iteritems(self._attr_info):
if value.get('primary_key', False):
return key
return default_primary_key
def _is_native_bulk_supported(self):
native_bulk_attr_name = ("_%s__native_bulk_support"
% self._plugin.__class__.__name__)
return getattr(self._plugin, native_bulk_attr_name, False)
def _is_native_pagination_supported(self):
return api_common.is_native_pagination_supported(self._plugin)
def _is_native_sorting_supported(self):
return api_common.is_native_sorting_supported(self._plugin)
def _exclude_attributes_by_policy(self, context, data):
"""Identifies attributes to exclude according to authZ policies.
Return a list of attribute names which should be stripped from the
response returned to the user because the user is not authorized
to see them.
"""
attributes_to_exclude = []
for attr_name in data.keys():
attr_data = self._attr_info.get(attr_name)
if attr_data and attr_data['is_visible']:
if policy.check(
context,
'%s:%s' % (self._plugin_handlers[self.SHOW], attr_name),
data,
might_not_exist=True,
pluralized=self._collection):
# this attribute is visible, check next one
continue
# if the code reaches this point then either the policy check
# failed or the attribute was not visible in the first place
attributes_to_exclude.append(attr_name)
return attributes_to_exclude
def _view(self, context, data, fields_to_strip=None):
"""Build a view of an API resource.
:param context: the neutron context
:param data: the object for which a view is being created
:param fields_to_strip: attributes to remove from the view
:returns: a view of the object which includes only attributes
visible according to API resource declaration and authZ policies.
"""
fields_to_strip = ((fields_to_strip or []) +
self._exclude_attributes_by_policy(context, data))
return self._filter_attributes(context, data, fields_to_strip)
def _filter_attributes(self, context, data, fields_to_strip=None):
if not fields_to_strip:
return data
return dict(item for item in six.iteritems(data)
if (item[0] not in fields_to_strip))
def _do_field_list(self, original_fields):
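"""Add policy-required attributes to the requested field list; return the list plus the attribute names that were added."""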
fields_to_add = None
# don't do anything if fields were not specified in the request
if original_fields:
fields_to_add = [attr for attr in self._policy_attrs
if attr not in original_fields]
original_fields.extend(self._policy_attrs)
return original_fields, fields_to_add
def __getattr__(self, name):
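"""Dispatch member actions dynamically: fetch the resource, enforce the action's policy, then call the plugin method of the same name."""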
if name in self._member_actions:
@db_api.retry_db_errors
def _handle_action(request, id, **kwargs):
arg_list = [request.context, id]
# Ensure policy engine is initialized
policy.init()
# Fetch the resource and verify if the user can access it
try:
parent_id = kwargs.get(self._parent_id_name)
resource = self._item(request,
id,
do_authz=True,
field_list=None,
parent_id=parent_id)
except oslo_policy.PolicyNotAuthorized:
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
body = kwargs.pop('body', None)
# Explicit comparison with None to distinguish from {}
if body is not None:
arg_list.append(body)
# It is ok to raise a 403 because accessibility to the
# object was checked earlier in this method
policy.enforce(request.context,
name,
resource,
pluralized=self._collection)
ret_value = getattr(self._plugin, name)(*arg_list, **kwargs)
# It is simply impossible to predict whether one of this
# actions alters resource usage. For instance a tenant port
# is created when a router interface is added. Therefore it is
# important to mark as dirty resources whose counters have
# been altered by this operation
resource_registry.set_resources_dirty(request.context)
return ret_value
return _handle_action
else:
raise AttributeError()
def _get_pagination_helper(self, request):
if self._allow_pagination and self._native_pagination:
return api_common.PaginationNativeHelper(request,
self._primary_key)
elif self._allow_pagination:
return api_common.PaginationEmulatedHelper(request,
self._primary_key)
return api_common.NoPaginationHelper(request, self._primary_key)
def _get_sorting_helper(self, request):
if self._allow_sorting and self._native_sorting:
return api_common.SortingNativeHelper(request, self._attr_info)
elif self._allow_sorting:
return api_common.SortingEmulatedHelper(request, self._attr_info)
return api_common.NoSortingHelper(request, self._attr_info)
def _items(self, request, do_authz=False, parent_id=None):
"""Retrieves and formats a list of elements of the requested entity."""
# NOTE(salvatore-orlando): The following ensures that fields which
# are needed for authZ policy validation are not stripped away by the
# plugin before returning.
original_fields, fields_to_add = self._do_field_list(
api_common.list_args(request, 'fields'))
filters = api_common.get_filters(request, self._attr_info,
['fields', 'sort_key', 'sort_dir',
'limit', 'marker', 'page_reverse'])
kwargs = {'filters': filters,
'fields': original_fields}
sorting_helper = self._get_sorting_helper(request)
pagination_helper = self._get_pagination_helper(request)
sorting_helper.update_args(kwargs)
sorting_helper.update_fields(original_fields, fields_to_add)
pagination_helper.update_args(kwargs)
pagination_helper.update_fields(original_fields, fields_to_add)
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, self._plugin_handlers[self.LIST])
obj_list = obj_getter(request.context, **kwargs)
obj_list = sorting_helper.sort(obj_list)
obj_list = pagination_helper.paginate(obj_list)
# Check authz
if do_authz:
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
# Omit items from list that should not be visible
obj_list = [obj for obj in obj_list
if policy.check(request.context,
self._plugin_handlers[self.SHOW],
obj,
plugin=self._plugin,
pluralized=self._collection)]
# Use the first element in the list for discriminating which attributes
# should be filtered out because of authZ policies
# fields_to_add contains a list of attributes added for request policy
# checks but that were not required by the user. They should be
# therefore stripped
fields_to_strip = fields_to_add or []
if obj_list:
fields_to_strip += self._exclude_attributes_by_policy(
request.context, obj_list[0])
collection = {self._collection:
[self._filter_attributes(
request.context, obj,
fields_to_strip=fields_to_strip)
for obj in obj_list]}
pagination_links = pagination_helper.get_links(obj_list)
if pagination_links:
collection[self._collection + "_links"] = pagination_links
# Synchronize usage trackers, if needed
resource_registry.resync_resource(
request.context, self._resource, request.context.tenant_id)
return collection
def _item(self, request, id, do_authz=False, field_list=None,
parent_id=None):
"""Retrieves and formats a single element of the requested entity."""
kwargs = {'fields': field_list}
action = self._plugin_handlers[self.SHOW]
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj_getter = getattr(self._plugin, action)
obj = obj_getter(request.context, id, **kwargs)
# Check authz
# FIXME(salvatore-orlando): obj_getter might return references to
# other resources. Must check authZ on them too.
if do_authz:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
return obj
@db_api.retry_db_errors
def index(self, request, **kwargs):
"""Returns a list of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return self._items(request, True, parent_id)
@db_api.retry_db_errors
def show(self, request, id, **kwargs):
"""Returns detailed information about the requested entity."""
try:
# NOTE(salvatore-orlando): The following ensures that fields
# which are needed for authZ policy validation are not stripped
# away by the plugin before returning.
field_list, added_fields = self._do_field_list(
api_common.list_args(request, "fields"))
parent_id = kwargs.get(self._parent_id_name)
# Ensure policy engine is initialized
policy.init()
return {self._resource:
self._view(request.context,
self._item(request,
id,
do_authz=True,
field_list=field_list,
parent_id=parent_id),
fields_to_strip=added_fields)}
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def _emulate_bulk_create(self, obj_creator, request, body, parent_id=None):
objs = []
try:
for item in body[self._collection]:
kwargs = {self._resource: item}
if parent_id:
kwargs[self._parent_id_name] = parent_id
fields_to_strip = self._exclude_attributes_by_policy(
request.context, item)
objs.append(self._filter_attributes(
request.context,
obj_creator(request.context, **kwargs),
fields_to_strip=fields_to_strip))
return objs
# Note(salvatore-orlando): broad catch as in theory a plugin
# could raise any kind of exception
except Exception:
with excutils.save_and_reraise_exception():
for obj in objs:
obj_deleter = getattr(self._plugin,
self._plugin_handlers[self.DELETE])
try:
kwargs = ({self._parent_id_name: parent_id}
if parent_id else {})
obj_deleter(request.context, obj['id'], **kwargs)
except Exception:
# broad catch as our only purpose is to log the
# exception
LOG.exception(_LE("Unable to undo add for "
"%(resource)s %(id)s"),
{'resource': self._resource,
'id': obj['id']})
# TODO(salvatore-orlando): The object being processed when the
# plugin raised might have been created or not in the db.
# We need a way for ensuring that if it has been created,
# it is then deleted
def create(self, request, body=None, **kwargs):
self._notifier.info(request.context,
self._resource + '.create.start',
body)
return self._create(request, body, **kwargs)
@db_api.retry_db_errors
def _create(self, request, body, **kwargs):
"""Creates a new instance of the requested entity."""
parent_id = kwargs.get(self._parent_id_name)
body = Controller.prepare_request_body(request.context,
body, True,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.CREATE]
# Check authz
if self._collection in body:
# Have to account for bulk create
items = body[self._collection]
else:
items = [body]
# Ensure policy engine is initialized
policy.init()
# Store requested resource amounts grouping them by tenant
# This won't work with multiple resources. However, because of the
# current structure of this controller, there will hardly be more than
# one resource for which reservations are being made.
request_deltas = collections.defaultdict(int)
for item in items:
self._validate_network_tenant_ownership(request,
item[self._resource])
policy.enforce(request.context,
action,
item[self._resource],
pluralized=self._collection)
if 'tenant_id' not in item[self._resource]:
# no tenant_id - no quota check
continue
tenant_id = item[self._resource]['tenant_id']
request_deltas[tenant_id] += 1
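# Illustrative note (not from the original source): for a bulk request that
# creates three ports for tenant "t1" and one for "t2", request_deltas ends
# up as {"t1": 3, "t2": 1}, and the loop below then makes one quota
# reservation per tenant.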
# Quota enforcement
reservations = []
try:
for (tenant, delta) in request_deltas.items():
reservation = quota.QUOTAS.make_reservation(
request.context,
tenant,
{self._resource: delta},
self._plugin)
reservations.append(reservation)
except n_exc.QuotaResourceUnknown as e:
# We don't want to quota this resource
LOG.debug(e)
def notify(create_result):
# Ensure usage trackers for all resources affected by this API
# operation are marked as dirty
with request.context.session.begin():
# Commit the reservation(s)
for reservation in reservations:
quota.QUOTAS.commit_reservation(
request.context, reservation.reservation_id)
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.create.end'
self._notifier.info(request.context,
notifier_method,
create_result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=create_result,
method_name=notifier_method,
collection=self._collection,
action=action, original={})
return create_result
def do_create(body, bulk=False, emulated=False):
kwargs = {self._parent_id_name: parent_id} if parent_id else {}
if bulk and not emulated:
obj_creator = getattr(self._plugin, "%s_bulk" % action)
else:
obj_creator = getattr(self._plugin, action)
try:
if emulated:
return self._emulate_bulk_create(obj_creator, request,
body, parent_id)
else:
if self._collection in body:
# This is weird but fixing it requires changes to the
# plugin interface
kwargs.update({self._collection: body})
else:
kwargs.update({self._resource: body})
return obj_creator(request.context, **kwargs)
except Exception:
# In case of failure the plugin will always raise an
# exception. Cancel the reservation
with excutils.save_and_reraise_exception():
for reservation in reservations:
quota.QUOTAS.cancel_reservation(
request.context, reservation.reservation_id)
if self._collection in body and self._native_bulk:
# plugin does atomic bulk create operations
objs = do_create(body, bulk=True)
# Use first element of list to discriminate attributes which
# should be removed because of authZ policies
fields_to_strip = self._exclude_attributes_by_policy(
request.context, objs[0])
return notify({self._collection: [self._filter_attributes(
request.context, obj, fields_to_strip=fields_to_strip)
for obj in objs]})
else:
if self._collection in body:
# Emulate atomic bulk behavior
objs = do_create(body, bulk=True, emulated=True)
return notify({self._collection: objs})
else:
obj = do_create(body)
return notify({self._resource: self._view(request.context,
obj)})
def delete(self, request, id, **kwargs):
"""Deletes the specified entity."""
if request.body:
msg = _('Request body is not supported in DELETE.')
raise webob.exc.HTTPBadRequest(msg)
self._notifier.info(request.context,
self._resource + '.delete.start',
{self._resource + '_id': id})
return self._delete(request, id, **kwargs)
@db_api.retry_db_errors
def _delete(self, request, id, **kwargs):
action = self._plugin_handlers[self.DELETE]
# Check authz
policy.init()
parent_id = kwargs.get(self._parent_id_name)
obj = self._item(request, id, parent_id=parent_id)
try:
policy.enforce(request.context,
action,
obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
# To avoid giving away information, pretend that it
# doesn't exist
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_deleter = getattr(self._plugin, action)
obj_deleter(request.context, id, **kwargs)
# A delete operation usually alters resource usage, so mark affected
# usage trackers as dirty
resource_registry.set_resources_dirty(request.context)
notifier_method = self._resource + '.delete.end'
result = {self._resource: self._view(request.context, obj)}
notifier_payload = {self._resource + '_id': id}
notifier_payload.update(result)
self._notifier.info(request.context,
notifier_method,
notifier_payload)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original={})
def update(self, request, id, body=None, **kwargs):
"""Updates the specified entity's attributes."""
try:
payload = body.copy()
except AttributeError:
msg = _("Invalid format: %s") % request.body
raise exceptions.BadRequest(resource='body', msg=msg)
payload['id'] = id
self._notifier.info(request.context,
self._resource + '.update.start',
payload)
return self._update(request, id, body, **kwargs)
@db_api.retry_db_errors
def _update(self, request, id, body, **kwargs):
body = Controller.prepare_request_body(request.context,
body, False,
self._resource, self._attr_info,
allow_bulk=self._allow_bulk)
action = self._plugin_handlers[self.UPDATE]
# Load object to check authz
# but pass only attributes in the original body and required
# by the policy engine to the policy 'brain'
field_list = [name for (name, value) in six.iteritems(self._attr_info)
if (value.get('required_by_policy') or
value.get('primary_key') or
'default' not in value)]
# Ensure policy engine is initialized
policy.init()
parent_id = kwargs.get(self._parent_id_name)
orig_obj = self._item(request, id, field_list=field_list,
parent_id=parent_id)
orig_object_copy = copy.copy(orig_obj)
orig_obj.update(body[self._resource])
# Make a list of attributes to be updated to inform the policy engine
# which attributes are set explicitly so that it can distinguish them
# from the ones that are set to their default values.
orig_obj[n_const.ATTRIBUTES_TO_UPDATE] = body[self._resource].keys()
try:
policy.enforce(request.context,
action,
orig_obj,
pluralized=self._collection)
except oslo_policy.PolicyNotAuthorized:
with excutils.save_and_reraise_exception() as ctxt:
# If a tenant is modifying its own object, it's safe to return<|fim▁hole|> orig_obj_tenant_id = orig_obj.get("tenant_id")
if (request.context.tenant_id != orig_obj_tenant_id or
orig_obj_tenant_id is None):
ctxt.reraise = False
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
obj_updater = getattr(self._plugin, action)
kwargs = {self._resource: body}
if parent_id:
kwargs[self._parent_id_name] = parent_id
obj = obj_updater(request.context, id, **kwargs)
# Usually an update operation does not alter resource usage, but as
# there might be side effects it might be worth checking for changes
# in resource usage here as well (e.g: a tenant port is created when a
# router interface is added)
resource_registry.set_resources_dirty(request.context)
result = {self._resource: self._view(request.context, obj)}
notifier_method = self._resource + '.update.end'
self._notifier.info(request.context, notifier_method, result)
registry.notify(self._resource, events.BEFORE_RESPONSE, self,
context=request.context, data=result,
method_name=notifier_method, action=action,
original=orig_object_copy)
return result
@staticmethod
def prepare_request_body(context, body, is_create, resource, attr_info,
allow_bulk=False):
"""Verifies required attributes are in request body.
Also checks that an attribute is only specified if it is allowed
for the given operation (create/update).
Attributes with default values are considered to be optional.
The body argument must be the deserialized body.
"""
collection = resource + "s"
if not body:
raise webob.exc.HTTPBadRequest(_("Resource body required"))
LOG.debug("Request body: %(body)s", {'body': body})
try:
if collection in body:
if not allow_bulk:
raise webob.exc.HTTPBadRequest(_("Bulk operation "
"not supported"))
if not body[collection]:
raise webob.exc.HTTPBadRequest(_("Resources required"))
bulk_body = [
Controller.prepare_request_body(
context, item if resource in item
else {resource: item}, is_create, resource, attr_info,
allow_bulk) for item in body[collection]
]
return {collection: bulk_body}
res_dict = body.get(resource)
except (AttributeError, TypeError):
msg = _("Body contains invalid data")
raise webob.exc.HTTPBadRequest(msg)
if res_dict is None:
msg = _("Unable to find '%s' in request body") % resource
raise webob.exc.HTTPBadRequest(msg)
attributes.populate_tenant_id(context, res_dict, attr_info, is_create)
attributes.verify_attributes(res_dict, attr_info)
if is_create: # POST
attributes.fill_default_value(attr_info, res_dict,
webob.exc.HTTPBadRequest)
else: # PUT
for attr, attr_vals in six.iteritems(attr_info):
if attr in res_dict and not attr_vals['allow_put']:
msg = _("Cannot update read-only attribute %s") % attr
raise webob.exc.HTTPBadRequest(msg)
attributes.convert_value(attr_info, res_dict, webob.exc.HTTPBadRequest)
return body
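# Illustrative sketch (assumed resource names, not from the original source):
# for a resource named "port", prepare_request_body accepts either a single
# item {"port": {...}} or, when allow_bulk is True, a bulk payload
# {"ports": [{...}, {...}]}; bulk items are validated recursively one by one,
# and webob.exc.HTTPBadRequest is raised for a missing body, an empty or
# disallowed bulk list, or (on update) an attempt to set a read-only attribute.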
def _validate_network_tenant_ownership(self, request, resource_item):
# TODO(salvatore-orlando): consider whether this check can be folded
# in the policy engine
if (request.context.is_admin or request.context.is_advsvc or
self._resource not in ('port', 'subnet')):
return
network = self._plugin.get_network(
request.context,
resource_item['network_id'])
# do not perform the check on shared networks
if network.get('shared'):
return
network_owner = network['tenant_id']
if network_owner != resource_item['tenant_id']:
# NOTE(kevinbenton): we raise a 404 to hide the existence of the
# network from the tenant since they don't have access to it.
msg = _('The resource could not be found.')
raise webob.exc.HTTPNotFound(msg)
def create_resource(collection, resource, plugin, params, allow_bulk=False,
member_actions=None, parent=None, allow_pagination=False,
allow_sorting=False):
controller = Controller(plugin, collection, resource, params, allow_bulk,
member_actions=member_actions, parent=parent,
allow_pagination=allow_pagination,
allow_sorting=allow_sorting)
return wsgi_resource.Resource(controller, FAULT_MAP)<|fim▁end|>
|
# a 403. Otherwise, pretend that it doesn't exist to avoid
# giving away information.
|
<|file_name|>attrib.go<|end_file_name|><|fim▁begin|>package glw
import "golang.org/x/mobile/gl"
type A2fv gl.Attrib
func (a A2fv) Enable() { ctx.EnableVertexAttribArray(gl.Attrib(a)) }
func (a A2fv) Disable() { ctx.DisableVertexAttribArray(gl.Attrib(a)) }
func (a A2fv) Pointer() {
a.Enable()
ctx.VertexAttribPointer(gl.Attrib(a), 2, gl.FLOAT, false, 0, 0)
}
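// Illustrative usage (assumption, not from the original source): callers
// typically look the attribute up once, e.g.
//   pos := A2fv(ctx.GetAttribLocation(program, "pos"))
// and call pos.Pointer() after binding the vertex buffer, so the enabled
// array is read as two float32 components per vertex.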
type A3fv gl.Attrib
func (a A3fv) Enable() { ctx.EnableVertexAttribArray(gl.Attrib(a)) }
func (a A3fv) Disable() { ctx.DisableVertexAttribArray(gl.Attrib(a)) }
func (a A3fv) Pointer() {
a.Enable()<|fim▁hole|>
type A4fv gl.Attrib
func (a A4fv) Enable() { ctx.EnableVertexAttribArray(gl.Attrib(a)) }
func (a A4fv) Disable() { ctx.DisableVertexAttribArray(gl.Attrib(a)) }
func (a A4fv) Pointer() {
a.Enable()
ctx.VertexAttribPointer(gl.Attrib(a), 4, gl.FLOAT, false, 0, 0)
}<|fim▁end|>
|
ctx.VertexAttribPointer(gl.Attrib(a), 3, gl.FLOAT, false, 0, 0)
}
|
<|file_name|>test_queue.py<|end_file_name|><|fim▁begin|>import uuid
import pytest
from kazoo.testing import KazooTestCase
from kazoo.tests.util import CI_ZK_VERSION
class KazooQueueTests(KazooTestCase):
def _makeOne(self):
path = "/" + uuid.uuid4().hex
return self.client.Queue(path)
def test_queue_validation(self):
queue = self._makeOne()
with pytest.raises(TypeError):
queue.put({})
with pytest.raises(TypeError):
queue.put(b"one", b"100")
with pytest.raises(TypeError):
queue.put(b"one", 10.0)
with pytest.raises(ValueError):
queue.put(b"one", -100)
with pytest.raises(ValueError):
queue.put(b"one", 100000)
def test_empty_queue(self):
queue = self._makeOne()
assert len(queue) == 0
assert queue.get() is None
assert len(queue) == 0
def test_queue(self):
queue = self._makeOne()
queue.put(b"one")
queue.put(b"two")
queue.put(b"three")
assert len(queue) == 3
assert queue.get() == b"one"
assert queue.get() == b"two"
assert queue.get() == b"three"
assert len(queue) == 0
def test_priority(self):
queue = self._makeOne()
queue.put(b"four", priority=101)
queue.put(b"one", priority=0)
queue.put(b"two", priority=0)
queue.put(b"three", priority=10)
assert queue.get() == b"one"
assert queue.get() == b"two"
assert queue.get() == b"three"
assert queue.get() == b"four"
class KazooLockingQueueTests(KazooTestCase):
def setUp(self):
KazooTestCase.setUp(self)
skip = False
if CI_ZK_VERSION and CI_ZK_VERSION < (3, 4):
skip = True
elif CI_ZK_VERSION and CI_ZK_VERSION >= (3, 4):
skip = False
else:
ver = self.client.server_version()
if ver[1] < 4:
skip = True
if skip:
pytest.skip("Must use Zookeeper 3.4 or above")
def _makeOne(self):
path = "/" + uuid.uuid4().hex
return self.client.LockingQueue(path)
def test_queue_validation(self):
queue = self._makeOne()
with pytest.raises(TypeError):
queue.put({})
with pytest.raises(TypeError):
queue.put(b"one", b"100")
with pytest.raises(TypeError):
queue.put(b"one", 10.0)
with pytest.raises(ValueError):
queue.put(b"one", -100)
with pytest.raises(ValueError):
queue.put(b"one", 100000)
with pytest.raises(TypeError):
queue.put_all({})
with pytest.raises(TypeError):
queue.put_all([{}])
with pytest.raises(TypeError):
queue.put_all([b"one"], b"100")
with pytest.raises(TypeError):
queue.put_all([b"one"], 10.0)
with pytest.raises(ValueError):
queue.put_all([b"one"], -100)
with pytest.raises(ValueError):
queue.put_all([b"one"], 100000)
def test_empty_queue(self):
queue = self._makeOne()
assert len(queue) == 0
assert queue.get(0) is None
assert len(queue) == 0
<|fim▁hole|> queue.put_all([b"two", b"three"])
assert len(queue) == 3
assert not queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"one"
assert queue.holds_lock()
# Without consuming, should return the same element
assert queue.get(1) == b"one"
assert queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"two"
assert queue.holds_lock()
assert queue.consume()
assert not queue.holds_lock()
assert queue.get(1) == b"three"
assert queue.holds_lock()
assert queue.consume()
assert not queue.holds_lock()
assert not queue.consume()
assert len(queue) == 0
def test_consume(self):
queue = self._makeOne()
queue.put(b"one")
assert not queue.consume()
queue.get(0.1)
assert queue.consume()
assert not queue.consume()
def test_release(self):
queue = self._makeOne()
queue.put(b"one")
assert queue.get(1) == b"one"
assert queue.holds_lock()
assert queue.release()
assert not queue.holds_lock()
assert queue.get(1) == b"one"
assert queue.consume()
assert not queue.release()
assert len(queue) == 0
def test_holds_lock(self):
queue = self._makeOne()
assert not queue.holds_lock()
queue.put(b"one")
queue.get(0.1)
assert queue.holds_lock()
queue.consume()
assert not queue.holds_lock()
def test_priority(self):
queue = self._makeOne()
queue.put(b"four", priority=101)
queue.put(b"one", priority=0)
queue.put(b"two", priority=0)
queue.put(b"three", priority=10)
assert queue.get(1) == b"one"
assert queue.consume()
assert queue.get(1) == b"two"
assert queue.consume()
assert queue.get(1) == b"three"
assert queue.consume()
assert queue.get(1) == b"four"
assert queue.consume()
def test_concurrent_execution(self):
queue = self._makeOne()
value1 = []
value2 = []
value3 = []
event1 = self.client.handler.event_object()
event2 = self.client.handler.event_object()
event3 = self.client.handler.event_object()
def get_concurrently(value, event):
q = self.client.LockingQueue(queue.path)
value.append(q.get(0.1))
event.set()
self.client.handler.spawn(get_concurrently, value1, event1)
self.client.handler.spawn(get_concurrently, value2, event2)
self.client.handler.spawn(get_concurrently, value3, event3)
queue.put(b"one")
event1.wait(0.2)
event2.wait(0.2)
event3.wait(0.2)
result = value1 + value2 + value3
assert result.count(b"one") == 1
assert result.count(None) == 2<|fim▁end|>
|
def test_queue(self):
queue = self._makeOne()
queue.put(b"one")
|
<|file_name|>templates.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
## $Id: webmessage_templates.py,v 1.32 2008/03/26 23:26:23 tibor Exp $
##
## handles rendering of webmessage module
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
""" Templates for field exporter plugin """
__revision__ = "$Id: webmessage_templates.py,v 1.32 2008/03/26 23:26:23 tibor Exp $"
import cgi
from invenio.config import CFG_SITE_LANG, CFG_SITE_URL
from invenio.base.i18n import gettext_set_language
from invenio.utils.date import convert_datestruct_to_datetext, convert_datetext_to_dategui, convert_datestruct_to_dategui
from invenio.legacy.bibexport.fieldexporter_dblayer import Job, JobResult
class Template:
"""Templates for field exporter plugin"""
_JOBS_URL = "%s/exporter/jobs" % (CFG_SITE_URL, )
_EDIT_JOB_URL = "%s/exporter/edit_job" % (CFG_SITE_URL, )
_EDIT_QUERY_URL = "%s/exporter/edit_query" % (CFG_SITE_URL, )
_JOB_RESULTS_URL = "%s/exporter/job_results" % (CFG_SITE_URL, )
_DISPLAY_JOB_RESULT_URL = "%s/exporter/display_job_result" % (CFG_SITE_URL, )
_DOWNLOAD_JOB_RESULT_URL = "%s/exporter/download_job_result" % (CFG_SITE_URL, )
_JOB_HISTORY_URL = "%s/exporter/history" % (CFG_SITE_URL, )
def tmpl_styles(self):
"""Defines the local CSS styles used in the plugin"""
styles = """
<style type="text/css">
.label{
white-space: nowrap;
padding-right: 15px;
}
.textentry{
width: 350px;
}
table.spacedcells td{
padding-right: 20px;
white-space: nowrap;
}
table.spacedcells th{
padding-right: 20px;
text-align: left;
}
</style>
<script type="text/javascript">
<!--
function SetAllCheckBoxes(FormName, FieldName, CheckValue)
{
if(!document.forms[FormName])
return;
var objCheckBoxes = document.forms[FormName].elements[FieldName];
if(!objCheckBoxes)
return;
var countCheckBoxes = objCheckBoxes.length;
if(!countCheckBoxes)
objCheckBoxes.checked = CheckValue;
else
// set the check value for all check boxes
for(var i = 0; i < countCheckBoxes; i++)
objCheckBoxes[i].checked = CheckValue;
}
// -->
</script>
"""
return styles
def tmpl_navigation_menu(self, language = CFG_SITE_LANG):
"""Returns HTML representing navigation menu for field exporter."""
_ = gettext_set_language(language)
navigation_menu = """
<table class="headermodulebox">
<tbody><tr>
<td class="headermoduleboxbody">
<a class="header" href="%(job_verview_url)s?ln=%(language)s">%(label_job_overview)s</a>
</td>
<td class="headermoduleboxbody"><|fim▁hole|> <td class="headermoduleboxbody">
<a class="header" href="%(job_history_url)s?ln=%(language)s">%(label_job_history)s</a>
</td>
</tr></tbody></table>
""" % {"edit_job_url" : self._EDIT_JOB_URL,
"job_verview_url" : self._JOBS_URL,
"job_history_url" : self._JOB_HISTORY_URL,
"language" : language,
"label_job_overview" : _("Export Job Overview"),
"label_new_job" : _("New Export Job"),
"label_job_history" : _("Export Job History")
}
return navigation_menu
def tmpl_display_jobs(self, jobs, language = CFG_SITE_LANG):
"""
Creates a page displaying all the jobs.
@param jobs: list of the jobs that have to be displayed
@param language: language of the page
"""
_ = gettext_set_language(language)
table_rows = ""
for current_job in jobs:
# convert last run date into text proper to be shown to the user
datetext = convert_datestruct_to_datetext(current_job.get_last_run())
last_run = convert_datetext_to_dategui(datetext, language)
# obtain text corresponding to the frequency of execution
frequency = current_job.get_frequency()
frequency_text = self._get_frequency_text(frequency)
row = """<tr>
<td><input type="checkbox" name="selected_jobs" value="%(job_id)s"></input></td>
<td><a href="%(edit_job_url)s?id=%(job_id)s&ln=%(language)s">%(name)s</a></td>
<td>%(frequency)s</td>
<td>%(last_run)s</td>
</tr>""" % self._html_escape_dictionary({
"edit_job_url" : self._EDIT_JOB_URL,
"job_id" : current_job.get_id(),
"name" : current_job.get_name(),
"frequency" : frequency_text,
"language" : language,
"last_run" : last_run
})
table_rows += row
select_all_none_row = """
<tr><td colspan="4">
<small>%s</small><br><br>
</td></tr>""" \
%(self._get_select_all_none_html("jobsForm",
"selected_jobs",
language))
table_rows += select_all_none_row
buttons_row = """<tr>
<td colspan="3">
<input type="Submit" name="run_button" value="%(label_run)s" class="formbutton">
<input type="Submit" name="delete_button" value="%(label_delete)s" class="formbutton">
</td>
<td align="right">
<input type="Submit" name="new_button" value="%(label_new)s" class="formbutton">
</td>
</tr>""" % {
"label_run" : _("Run"),
"label_delete" : _("Delete"),
"label_new" : _("New")
}
table_rows += buttons_row
body = """
<form method="post" name="jobsForm">
<table class="spacedcells">
<th></th>
<th>%(label_name)s</th>
<th>%(label_frequency)s</th>
<th>%(label_last_run)s</th>
%(table_rows)s
</table>
</form>
""" % {
"table_rows" : table_rows,
"label_name" : _("Name"),
"label_frequency" : _("Run"),
"label_last_run" : _("Last run")
}
return body
def tmpl_edit_job(self, job, language = CFG_SITE_LANG):
"""
Creates a page for editing a job.
@param job: The job that will be edited
@param language: language of the page
"""
_ = gettext_set_language(language)
job_frequency = job.get_frequency()
frequency_select_box_html = self._create_frequency_select_box("job_frequency", job_frequency, language)
output_format_select_box_html = self._create_output_format_select_box(selected_value = job.get_output_format())
body = """
<form method="post">
<input type="Hidden" name="id" value="%(job_id)s">
<table>
<tr>
<td class = "label">%(name_label)s</td>
<td colspan="2"><input type="text" name="job_name" class="textentry" value="%(job_name)s"></td>
</tr>
<tr>
<td class = "label">%(frequency_label)s</td>
<td colspan="2">%(frequency_select_box)s</td>
</tr>
<tr>
<td class = "label">%(output_format_label)s</td>
<td colspan="2">%(output_format_select_box)s</td>
</tr>
<tr>
<td class = "label">%(start_label)s</td>
<td colspan="2"><input type="text" name="last_run" class="textentry" value="%(job_last_run)s"></td>
</tr>
<tr>
<td class = "label">%(output_directory_label)s</td>
<td colspan="2"><input type="text" name="output_directory" class="textentry" value="%(output_directory)s"></td>
</tr>
<tr>
<td></td>
<td>
<input type="Submit" name="save_button" value="%(save_label)s" class="formbutton">
<input type="Submit" name="cancel_button" value="%(cancel_label)s" class="formbutton">
</td>
<td align="right">
<input type="Submit" name="edit_queries_button" value="%(edit_queries_label)s" class="formbutton">
</td>
</tr>
</table>
</form>
""" % {
"name_label" : _("Name"),
"frequency_label" : _("Frequency"),
"output_format_label" : _("Output Format"),
"start_label" : _("Start"),
"output_directory_label" : _("Output Directory"),
"save_label" : _("Save"),
"cancel_label" : _("Cancel"),
"edit_queries_label" : _("Edit Queries"),
"job_id" : self._html_escape_content(job.get_id()),
"job_name" : self._html_escape_content(job.get_name()),
"frequency_select_box" : frequency_select_box_html,
"output_format_select_box" : output_format_select_box_html,
"job_last_run" : convert_datestruct_to_datetext(job.get_last_run()),
"output_directory" : self._html_escape_content(job.get_output_directory())
}
return body
def tmpl_display_job_queries(self, job_queries, job_id, language = CFG_SITE_LANG):
"""
Creates a page displaying the queries of a given job.
@param job_queries: list of JobQuery objects that have to be displayed
@param job_id: identifier of the job that own the queries
@param language: language of the page
"""
_ = gettext_set_language(language)
table_rows = ""
for current_query in job_queries:
output_fields = ", ".join(current_query.get_output_fields())
row = """<tr>
<td><input type="checkbox" name="selected_queries" value="%(query_id)s"></input></td>
<td><a href="%(edit_query_url)s?id=%(query_id)s&job_id=%(job_id)s&ln=%(language)s">%(name)s</a></td>
<td><input type="text" value="%(search_criteria)s" readonly style="border: none; width: 130px"></td>
<td><input type="text" value="%(output_fields)s" readonly style="border: none; width: 130px"></td>
<td><input type="text" value="%(comment)s" readonly style="border: none; width: 130px"></td>
</tr>""" % self._html_escape_dictionary({
"edit_query_url" : self._EDIT_QUERY_URL,
"language" : language,
"query_id" : current_query.get_id(),
"search_criteria" : current_query.get_search_criteria(),
"name" : current_query.get_name(),
"comment" : current_query.get_comment(),
"output_fields" : output_fields,
"job_id" : job_id
})
table_rows += row
select_all_none_row = """
<tr><td colspan="4">
<small>%s</small><br><br>
</td></tr>""" \
% (self._get_select_all_none_html("queriesForm",
"selected_queries",
language))
table_rows += select_all_none_row
buttons_row = """<tr>
<td colspan="4">
<input type="Submit" name="run_button" value="%(label_run)s" class="formbutton">
<input type="Submit" name="delete_button" value="%(label_delete)s" class="formbutton">
</td>
<td align="right">
<input type="Submit" name="new_button" value="%(label_new)s" class="formbutton">
</td>
</tr>""" % {
"label_run" : _("Run"),
"label_delete" : _("Delete"),
"label_new" : _("New")
}
table_rows += buttons_row
body = """
<form method="post" name="queriesForm">
<input type="Hidden" name="job_id" value="%(job_id)s">
<table class="spacedcells">
<th></th>
<th>%(label_name)s</th>
<th>%(label_search_criteria)s</th>
<th>%(label_output_fields)s</th>
<th>%(label_comment)s</th>
%(table_rows)s
</table>
</form>
""" % {
"table_rows" : table_rows,
"label_name" : _("Name"),
"label_search_criteria" : _("Query"),
"label_comment" : _("Comment"),
"label_output_fields" : _("Output Fields"),
"job_id" : self._html_escape_content(job_id)
}
return body
def tmpl_edit_query(self, query, job_id, language = CFG_SITE_LANG):
"""
Creates a page for editing a query.
@param query: the query that will be edited
@param language: language of the page
@return: The HTML content of the page
"""
_ = gettext_set_language(language)
body = """
<form method="post">
<input type="Hidden" name="id" value="%(id)s">
<input type="Hidden" name="job_id" value="%(job_id)s">
<table >
<tr>
<td class = "label">%(name_label)s</td>
<td><input type="text" name="name" class="textentry" value="%(name)s"></td>
</tr>
<tr>
<td class = "label">%(query_label)s</td>
<td><input type="text" name="search_criteria" class="textentry" value="%(search_criteria)s"></td>
</tr>
<tr>
<td class = "label">%(output_fields_label)s</td>
<td><input type="text" name="output_fields" class="textentry" value="%(output_fields)s"></td>
</tr>
<tr>
<td class = "label">%(comment_label)s</td>
<td><textarea name="comment" rows="6" class="textentry">%(comment)s</textarea></td>
</tr>
<tr>
<td></td>
<td>
<input type="Submit" name="save_button" value="%(save_label)s" class="formbutton">
<input type="Submit" name="cancel_button" value="%(cancel_label)s" class="formbutton">
</td>
</tr>
</table>
</form>
""" % self._html_escape_dictionary({
"name_label" : _("Name"),
"query_label" : _("Query"),
"output_fields_label" : _("Output fields"),
"comment_label" : _("Comment"),
"save_label" : _("Save"),
"cancel_label" : _("Cancel"),
"job_id" : job_id,
"id" : query.get_id(),
"name" : query.get_name(),
"search_criteria" : query.get_search_criteria(),
"output_fields" : ", ".join(query.get_output_fields()),
"comment" : query.get_comment(),
})
return body
def tmpl_display_queries_results(self, job_result, language = CFG_SITE_LANG):
"""Creates a page displaying results from execution of multiple queries.
@param job_result: JobResult object containing the job results
that will be displayed
@param language: language of the page
@return: The HTML content of the page
"""
_ = gettext_set_language(language)
queries_results = job_result.get_query_results()
output_format = job_result.get_job().get_output_format()
job_result_id = job_result.get_id()
body = ""
if job_result_id != JobResult.ID_MISSING:
download_and_format_html = """
<a href="%(download_job_results_url)s?result_id=%(job_result_id)s&ln=%(language)s"><input type="button" value="%(label_download)s" class="formbutton"></a>
<strong>%(label_view_as)s</strong>
<a href="%(display_job_result_url)s?result_id=%(job_result_id)s&output_format=%(output_format_marcxml)s&ln=%(language)s">MARCXML</a>
<a href="%(display_job_result_url)s?result_id=%(job_result_id)s&output_format=%(output_format_marc)s&ln=%(language)s">MARC</a>
""" % self._html_escape_dictionary({
"label_download" : _("Download"),
"label_view_as" : _("View as: "),
"output_format_marcxml" : Job.OUTPUT_FORMAT_MARCXML,
"output_format_marc" : Job.OUTPUT_FORMAT_MARC,
"download_job_results_url" : self._DOWNLOAD_JOB_RESULT_URL,
"language" : language,
"display_job_result_url" : self._DISPLAY_JOB_RESULT_URL,
"job_result_id" : job_result_id
})
body += download_and_format_html
for query_result in queries_results:
query = query_result.get_query()
results = query_result.get_result(output_format)
html = """
<h2>%(name)s</h2>
<strong>%(query_label)s: </strong>%(search_criteria)s<br>
<strong>%(output_fields_label)s: </strong>%(output_fields)s<br>
<textarea rows="10" style="width: 100%%" wrap="off" readonly>%(results)s</textarea></td>
""" % self._html_escape_dictionary({
"query_label" : _("Query"),
"output_fields_label" : _("Output fields"),
"name" : query.get_name(),
"search_criteria" : query.get_search_criteria(),
"output_fields" : ",".join(query.get_output_fields()),
"results" : results
})
body += html
return body
def tmpl_display_job_history(self, job_results, language = CFG_SITE_LANG):
"""Creates a page displaying information about
the job results given as a parameter.
@param job_results: List of JobResult objects containing
information about the job results that have to be displayed
@param language: language of the page
@return: The HTML content of the page
"""
_ = gettext_set_language(language)
table_rows = ""
for current_job_result in job_results:
current_job = current_job_result.get_job()
# convert the execution date into a form suitable for display to the user
execution_date_time = current_job_result.get_execution_date_time()
date = convert_datestruct_to_dategui(execution_date_time)
# obtain text corresponding to the frequency of execution
frequency = current_job.get_frequency()
frequency_text = self._get_frequency_text(frequency, language)
# set the status text
if current_job_result.STATUS_CODE_OK == current_job_result.get_status():
status = _("OK")
else:
status = _("Error")
records_found = current_job_result.get_number_of_records_found()
row = """<tr>
<td><a href="%(job_results_url)s?result_id=%(job_result_id)s&ln=%(language)s">%(job_name)s</a></td>
<td>%(job_frequency)s</td>
<td>%(execution_date)s</td>
<td><b>%(status)s</b>
<a href="%(display_job_result_url)s?result_id=%(job_result_id)s&ln=%(language)s">
<small>%(number_of_records_found)s %(label_records_found)s</small>
</a>
</td>
</tr>""" % self._html_escape_dictionary({
"job_name" : current_job.get_name(),
"job_frequency" : frequency_text,
"execution_date" : date,
"status" : status,
"number_of_records_found" : records_found,
"label_records_found" : _("records found"),
"job_results_url" : self._JOB_RESULTS_URL,
"display_job_result_url" : self._DISPLAY_JOB_RESULT_URL,
"language" : language,
"job_result_id" : current_job_result.get_id()
})
table_rows += row
body = """
<table class="spacedcells">
<th>%(label_job_name)s</th>
<th>%(label_job_frequency)s</th>
<th>%(label_execution_date)s</th>
<th>%(label_status)s</th>
%(table_rows)s
</table>
""" % {
"table_rows" : table_rows,
"label_job_name" : _("Job"),
"label_job_frequency" : _("Run"),
"label_execution_date" : _("Date"),
"label_status" : _("Status")
}
return body
def tmpl_display_job_result_information(self, job_result, language = CFG_SITE_LANG):
"""Creates a page with information about a given job result
@param job_result: JobResult object containing the job result
@param language: language of the page
@return: The HTML content of the page
"""
_ = gettext_set_language(language)
table_rows = ""
for current_query_result in job_result.get_query_results():
current_query_name = current_query_result.get_query().get_name()
# set the status text
if current_query_result.STATUS_CODE_OK == current_query_result.get_status():
status = _("OK")
else:
status = _("Error")
records_found = current_query_result.get_number_of_records_found()
row = """<tr>
<td>%(query_name)s</td>
<td><b>%(status)s</b></td>
<td><small>%(number_of_records_found)s %(label_records_found)s</small></td>
</tr>""" % self._html_escape_dictionary({
"query_name" : current_query_name,
"status" : status,
"number_of_records_found" : records_found,
"label_records_found" : _("records found")
})
table_rows += row
number_of_all_records_found = job_result.get_number_of_records_found()
job_result_id = job_result.get_id()
final_row = """
<tr>
<td></td>
<td><b>%(label_total)s</b></td>
<td>
<a href="%(display_job_results_url)s?result_id=%(job_result_id)s&ln=%(language)s">
<b>%(number_of_all_records_found)s %(label_records_found)s</b>
</a>
</td>
</tr>""" % self._html_escape_dictionary({
"label_total" : _("Total"),
"number_of_all_records_found" : number_of_all_records_found,
"label_records_found" : _("records found"),
"display_job_results_url" : self._DISPLAY_JOB_RESULT_URL,
"language" : language,
"job_result_id" : job_result_id
})
table_rows += final_row
download_row = """
<tr>
<td></td><td></td><td>
<a href="%(download_job_results_url)s?result_id=%(job_result_id)s&ln=%(language)s">
<input type="button" value="%(label_download)s" class="formbutton">
</a>
</td>
</tr>""" % self._html_escape_dictionary({
"label_download" : _("Download"),
"download_job_results_url" : self._DOWNLOAD_JOB_RESULT_URL,
"language" : language,
"job_result_id" : job_result_id
})
table_rows += download_row
job_name = self._html_escape_content(job_result.get_job().get_name())
if(job_result.get_status() == job_result.STATUS_CODE_ERROR):
status_messasge = job_result.get_status_message()
else:
status_messasge = ""
status_messasge = self._html_escape_content(status_messasge)
body = """
<h2>%(job_name)s</h2>
<table class="spacedcells">
<th>%(label_query)s</th>
<th>%(label_status)s</th>
<th></th>
%(table_rows)s
</table>
<br>
<pre style="color: Red;">%(status_message)s</pre>
""" % {
"table_rows" : table_rows,
"label_query" : _("Query"),
"label_status" : _("Status"),
"job_name" : job_name,
"status_message" : status_messasge
}
return body
def _get_select_all_none_html(self, form_name, field_name, language = CFG_SITE_LANG):
"""Returns HTML providing Select All|None links
@param form_name: the name of the form containing the checkboxes
@param field_name: the name of the checkbox fields that will be affected
@param language: language for output
"""
_ = gettext_set_language(language)
output_html = """
%(label_select)s: <a href="javascript:SetAllCheckBoxes('%(form_name)s', '%(field_name)s', true);">%(label_all)s</a>, <a href="javascript:SetAllCheckBoxes('%(form_name)s', '%(field_name)s', false);">%(label_none)s</a>
"""% {
"label_select" : _("Select"),
"label_all" : _("All"),
"label_none" : _("None"),
"form_name" : form_name,
"field_name" : field_name
}
return output_html
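# Example (illustrative only): for form_name="jobsForm" and
# field_name="selected_jobs" this returns a "Select: All, None" fragment
# whose links call SetAllCheckBoxes('jobsForm', 'selected_jobs', true) and
# SetAllCheckBoxes('jobsForm', 'selected_jobs', false), the helper defined
# in tmpl_styles() above.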
def _get_frequency_text(self, frequency, language = CFG_SITE_LANG):
"""
Returns text representation of the frequency: Manually, Daily, Weekly, Monthly
@param frequency: integer containg the number of hours between every execution.
@param language: language for output
"""
_ = gettext_set_language(language)
if 0 == frequency:
frequency_text = _("Manually")
elif 24 == frequency:
frequency_text = _("Daily")
elif 168 == frequency:
frequency_text = _("Weekly")
elif 720 == frequency:
frequency_text = _("Monthly")
else:
frequency_text = "Every %s hours" % (frequency,)
return frequency_text
def _create_output_format_select_box(self, selected_value = 0):
"""
Creates a select box for the output format of a job.
@param selected_value: value selected in the control
@return: HTML string representing an HTML select control.
"""
items = [("MARCXML", Job.OUTPUT_FORMAT_MARCXML),
("MARC", Job.OUTPUT_FORMAT_MARC)]
html_output = self._create_select_box("output_format", items, selected_value)
return html_output
def _create_frequency_select_box(self, name, selected_value = 0, language = CFG_SITE_LANG):
"""
Creates a select box for frequency of an action/task.
@param name: name of the control
@param language: language of the menu
@param selected_value: value selected in the control
@return: HTML string representing HTML select control.
"""
items = [(self._get_frequency_text(0, language), 0),
(self._get_frequency_text(24, language), 24),
(self._get_frequency_text(168, language), 168),
(self._get_frequency_text(720, language), 720)]
html_output = self._create_select_box(name, items, selected_value)
return html_output
def _create_select_box(self, name, items, selected_value = None):
""" Returns the HTML code for a select box.
@param name: the name of the control
@param items: list of (text, value) tuples where text is the text to be displayed
and value is the value corresponding to the text in the select box
e.g. [("first", 1), ("second", 2), ("third", 3)]
@param selected_value: the value that will be selected
in the select box.
"""
html_output = """<select name="%s">""" % name
for text, value in items:
if selected_value == value:
selected = 'selected="selected"'
else:
selected = ""
current_option = """<option value="%(value)s" %(selected)s>%(text)s</option>""" % self._html_escape_dictionary({
"value" : value,
"text" : text,
"selected" :selected
})
html_output += current_option
html_output += """</select>"""
return html_output
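# Example (illustrative values): _create_select_box("format",
# [("MARCXML", 0), ("MARC", 1)], selected_value=1) produces
# <select name="format"><option value="0" >MARCXML</option>
# <option value="1" selected="selected">MARC</option></select>,
# with option values and labels passed through cgi.escape first.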
def _html_escape_dictionary(self, dictionaty_to_escape):
"""Escapes all the values in the dictionary and transform
them in strings that are safe to siplay in HTML page.
HTML special symbols are replaced with their sage equivalents.
@param dictionaty_to_escape: dictionary containing values
that have to be escaped.
@return: returns dictionary with the same keys where the
values are escaped strings"""
for key in dictionaty_to_escape:
value = "%s" % dictionaty_to_escape[key]
dictionaty_to_escape[key] = cgi.escape(value)
return dictionaty_to_escape
def _html_escape_content(self, content_to_escape):
"""Escapes the value given as parameter and
transforms it into a string that is safe for display in an HTML page.
@param content_to_escape: contains the content that has to be escaped.
@return: string containing the escaped content
"""
text_content = "%s" % content_to_escape
escaped_content = cgi.escape(text_content)
return escaped_content<|fim▁end|>
|
<a class="header" href="%(edit_job_url)s?ln=%(language)s">%(label_new_job)s</a>
</td>
|
<|file_name|>Command.java<|end_file_name|><|fim▁begin|>package redis.clients.jedis;
import com.esotericsoftware.reflectasm.MethodAccess;
import org.apache.log4j.Logger;
import java.util.Map;
public abstract class Command {
private final static Logger LOG = Logger.getLogger(Command.class);
private final static MethodAccess access = MethodAccess
.get(ShardedJedis.class);
protected ShardedJedisSentinelPool pool;
public String set(String key, String value) {
return String.valueOf(this.invoke("set", new String[] { key, value },
new Class[] { String.class, String.class }));
}
public String get(String key) {
return String.valueOf(this.invoke("get", new String[] { key },
String.class));
}
public long del(String key) {
return (long) this.invoke("del", new String[] { key }, String.class);
}
public String lpopList(String key) {
return String.valueOf(this.invoke("lpop", new String[] { key },
String.class));
}
public long rpushList(String key, String... values) {
return (long) this.invoke("rpush", new Object[] { key, values },
new Class[] { String.class, String[].class });
}
public long expire(String key, int time) {
return (long) this.invoke("expire", new Object[] { key, time },
new Class[] { String.class, int.class });
}
public long hsetnx(String key, String field, String value) {
return (long) this.invoke("hsetnx", new String[] { key, field, value },
new Class[] { String.class, String.class, String.class });
}
public boolean exist(String key) {
return (boolean) this.invoke("exists", new String[] { key },
String.class);
}
public boolean existInSet(String key, String member) {
return (boolean) this.invoke("sismember", new String[] { key, member },
new Class[] { String.class, String.class });
}
public long saddSet(String key, String... members) {
return (long) this.invoke("sadd", new Object[] { key, members },
new Class[] { String.class, String[].class });
}
public long sremSet(String key, String... members) {
return (long) this.invoke("srem", new Object[] { key, members },
new Class[] { String.class, String[].class });
}
public String spopSet(String key) {
return String.valueOf(this.invoke("spop", new Object[] { key },
new Class[] { String.class }));
}
public long hSet(byte[] key, byte[] field, byte[] value) {
return (long) this.invoke("hset", new Object[] { key, field, value },
new Class[] { byte[].class, byte[].class, byte[].class });
}
@SuppressWarnings("unchecked")
public Map<byte[], byte[]> hGetAll(byte[] key) {
return (Map<byte[], byte[]>) this.invoke("hgetAll",
new Object[] { key }, new Class[] { byte[].class });
}
public byte[] hGet(byte[] key, byte[] field) {
return (byte[]) this.invoke("hget", new Object[] { key, field },
new Class[] { byte[].class, byte[].class });
}
public long del(byte[] key) {
return (long) this.invoke("del", new Object[] { key }, byte[].class);
}
protected Object invoke(String methodName, Object[] args,
Class<?>... parameterTypes) {<|fim▁hole|> try {
/*
* Method method = clazz.getMethod(methodName, parameterTypes); ret
* = method.invoke(jedis, args);
*/
ret = access.invoke(jedis, methodName, parameterTypes, args);
} catch (Exception e) {
LOG.error(e.getMessage(), e);
pool.returnBrokenResource(jedis);
} finally {
pool.returnResource(jedis);
}
return ret;
}
}<|fim▁end|>
|
Object ret = null;
ShardedJedis jedis = pool.getResource();
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(old_io)]
//! This test file mostly just has tests that make sure that the macros successfully compile.
extern crate fern;<|fim▁hole|>use std::sync;
#[test]
fn test_log() {
fern::local::set_thread_logger(sync::Arc::new(
Box::new(fern::NullLogger) as fern::BoxedLogger));
log!(&fern::Level::Info, "expected info message");
}
#[test]
fn test_levels() {
fern::local::set_thread_logger(sync::Arc::new(
Box::new(fern::NullLogger) as fern::BoxedLogger));
debug!("expected debug message");
info!("expected info message");
warning!("expected warning message");
severe!("expected severe message");
}
fn does_not_error() -> Result<String, String> {
Ok("unexpected error message!".to_string())
}
fn errors() -> Result<String, String> {
Err("expected severe message".to_string())
}
#[test]
fn test_error_logging() {
fern::local::set_thread_logger(sync::Arc::new(
Box::new(fern::NullLogger) as fern::BoxedLogger));
log_error!(errors(), "expected error: {e:?}");
log_error!(does_not_error(), "unexpected error!: {e:?}");
}
#[test]
fn test_error_then_with_error() {
fern::local::set_thread_logger(sync::Arc::new(
Box::new(fern::NullLogger) as fern::BoxedLogger));
log_error_then!(errors(), return, "expected error: {e:?}");
panic!("Should have returned!");
}
#[test]
fn test_error_then_without_error() {
fern::local::set_thread_logger(sync::Arc::new(
Box::new(fern::NullLogger) as fern::BoxedLogger));
log_error_then!(does_not_error(), panic!("not expected!"),
"unexpected error: {e:?}");
}<|fim▁end|>
|
#[macro_use]
extern crate fern_macros;
|
<|file_name|>webhook_test.go<|end_file_name|><|fim▁begin|>// Copyright 2016 LINE Corporation
//
// LINE Corporation licenses this file to you under the Apache License,
// version 2.0 (the "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package linebot
import (
"bytes"
"crypto/hmac"
"crypto/sha256"
"crypto/tls"
"encoding/base64"
"encoding/json"
"net/http"
"net/http/httptest"
"reflect"
"testing"
"time"
)
var webhookTestRequestBody = `{
"events": [
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "message",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"message": {
"id": "325708",
"type": "text",
"text": "Hello, world"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "message",
"timestamp": 1462629479859,
"source": {
"type": "group",
"groupId": "u206d25c2ea6bd87c17655609a1c37cb8",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"message": {
"id": "325708",
"type": "text",
"text": "Hello, world"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "message",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"message": {
"id": "325708",
"type": "image"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "message",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"message": {
"id": "325708",
"type": "location",
"title": "hello",
"address": "〒150-0002 東京都渋谷区渋谷2丁目21−1",
"latitude": 35.65910807942215,
"longitude": 139.70372892916203
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "message",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"message": {
"id": "325708",
"type": "sticker",
"packageId": "1",
"stickerId": "1"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "follow",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
}
},
{
"type": "unfollow",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "join",
"timestamp": 1462629479859,
"source": {
"type": "group",
"groupId": "cxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
},
{
"type": "leave",
"timestamp": 1462629479859,
"source": {
"type": "group",
"groupId": "cxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "postback",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"postback": {
"data": "action=buyItem&itemId=123123&color=red"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "postback",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"postback": {
"data": "action=sel&only=date",
"params": {
"date": "2017-09-03"
}
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "postback",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"postback": {
"data": "action=sel&only=time",
"params": {
"time": "15:38"
}
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "postback",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "u206d25c2ea6bd87c17655609a1c37cb8"
},
"postback": {
"data": "action=sel",
"params": {
"datetime": "2017-09-03T15:38"
}
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "beacon",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "U012345678901234567890123456789ab"
},
"beacon": {
"hwid":"374591320",
"type":"enter"
}
},
{
"replyToken": "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
"type": "beacon",
"timestamp": 1462629479859,
"source": {
"type": "user",
"userId": "U012345678901234567890123456789ab"
},
"beacon": {
"hwid":"374591320",
"type":"enter",
"dm":"1234567890abcdef"
}
}
]
}
`
var webhookTestWantEvents = []*Event{
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeMessage,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Message: &TextMessage{
ID: "325708",
Text: "Hello, world",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeMessage,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeGroup,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
GroupID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Message: &TextMessage{
ID: "325708",
Text: "Hello, world",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeMessage,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Message: &ImageMessage{
ID: "325708",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeMessage,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Message: &LocationMessage{
ID: "325708",
Title: "hello",
Address: "〒150-0002 東京都渋谷区渋谷2丁目21−1",
Latitude: 35.65910807942215,
Longitude: 139.70372892916203,
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeMessage,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Message: &StickerMessage{
ID: "325708",
PackageID: "1",
StickerID: "1",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeFollow,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
},
{
Type: EventTypeUnfollow,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeJoin,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeGroup,
GroupID: "cxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
},
},
{
Type: EventTypeLeave,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeGroup,
GroupID: "cxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypePostback,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Postback: &Postback{
Data: "action=buyItem&itemId=123123&color=red",
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypePostback,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Postback: &Postback{
Data: "action=sel&only=date",
Params: &Params{
Date: "2017-09-03",
},
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypePostback,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Postback: &Postback{
Data: "action=sel&only=time",
Params: &Params{
Time: "15:38",
},
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypePostback,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "u206d25c2ea6bd87c17655609a1c37cb8",
},
Postback: &Postback{
Data: "action=sel",
Params: &Params{
Datetime: "2017-09-03T15:38",
},
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeBeacon,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "U012345678901234567890123456789ab",
},
Beacon: &Beacon{
Hwid: "374591320",
Type: BeaconEventTypeEnter,
DeviceMessage: []byte{},
},
},
{
ReplyToken: "nHuyWiB7yP5Zw52FIkcQobQuGDXCTA",
Type: EventTypeBeacon,
Timestamp: time.Date(2016, time.May, 7, 13, 57, 59, int(859*time.Millisecond), time.UTC),
Source: &EventSource{
Type: EventSourceTypeUser,
UserID: "U012345678901234567890123456789ab",
},
Beacon: &Beacon{
Hwid: "374591320",
Type: BeaconEventTypeEnter,
DeviceMessage: []byte{0x12, 0x34, 0x56, 0x78, 0x90, 0xab, 0xcd, 0xef},
},
},
}
func TestParseRequest(t *testing.T) {
server := httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
client, err := New("testsecret", "testtoken")
if err != nil {
t.Error(err)
}
gotEvents, err := client.ParseRequest(r)
if err != nil {
if err == ErrInvalidSignature {
w.WriteHeader(400)
} else {
w.WriteHeader(500)
t.Error(err)
}
return
}
if len(gotEvents) != len(webhookTestWantEvents) {
t.Errorf("Event length %d; want %d", len(gotEvents), len(webhookTestWantEvents))
}
for i, got := range gotEvents {
want := webhookTestWantEvents[i]
if !reflect.DeepEqual(got, want) {
t.Errorf("Event %d %q; want %q", i, got, want)
}
}
}))
defer server.Close()
httpClient := &http.Client{
Transport: &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
},
}
// invalid signature
{
body := []byte(webhookTestRequestBody)
req, err := http.NewRequest("POST", server.URL, bytes.NewReader(body))
if err != nil {
t.Fatal(err)
}
req.Header.Set("X-Line-Signature", "invalidsignatue")
res, err := httpClient.Do(req)
if err != nil {
t.Fatal(err)
}
if res.StatusCode != 400 {
t.Errorf("StatusCode %d; want %d", res.StatusCode, 400)
}
}
// valid signature
{
body := []byte(webhookTestRequestBody)
req, err := http.NewRequest("POST", server.URL, bytes.NewReader(body))
if err != nil {
t.Fatal(err)
}
// generate signature
mac := hmac.New(sha256.New, []byte("testsecret"))
mac.Write(body)
req.Header.Set("X-Line-Signature", base64.StdEncoding.EncodeToString(mac.Sum(nil)))
res, err := httpClient.Do(req)
if err != nil {
t.Fatal(err)
}
if res == nil {
t.Error("response is nil")
}
if res.StatusCode != http.StatusOK {
t.Errorf("status: %d", res.StatusCode)
}
}
}
func TestEventMarshaling(t *testing.T) {
testCases := &struct {
Events []map[string]interface{} `json:"events"`
}{}
err := json.Unmarshal([]byte(webhookTestRequestBody), testCases)
if err != nil {
t.Fatal(err)
}
for i, want := range testCases.Events {
if err != nil {
t.Fatal(err)
}
e := webhookTestWantEvents[i]
gotJSON, err := json.Marshal(&e)
if err != nil {
t.Error(err)
continue
}
got := map[string]interface{}{}
err = json.Unmarshal(gotJSON, &got)
if err != nil {
t.Fatal(err)
}<|fim▁hole|> t.Errorf("Event marshal %d %q; want %q", i, got, want)
}
}
}
func BenchmarkParseRequest(b *testing.B) {
body := []byte(webhookTestRequestBody)
client, err := New("testsecret", "testtoken")
if err != nil {
b.Fatal(err)
}
mac := hmac.New(sha256.New, []byte("testsecret"))
mac.Write(body)
sign := base64.StdEncoding.EncodeToString(mac.Sum(nil))
b.ResetTimer()
for i := 0; i < b.N; i++ {
req, _ := http.NewRequest("POST", "", bytes.NewReader(body))
req.Header.Set("X-Line-Signature", sign)
client.ParseRequest(req)
}
}<|fim▁end|>
|
if !reflect.DeepEqual(got, want) {
|
<|file_name|>skin.py<|end_file_name|><|fim▁begin|>from Tools.Profile import profile
profile("LOAD:ElementTree")
import xml.etree.cElementTree
import os
profile("LOAD:enigma_skin")
from enigma import eSize, ePoint, eRect, gFont, eWindow, eLabel, ePixmap, eWindowStyleManager, \
addFont, gRGB, eWindowStyleSkinned, getDesktop
from Components.config import ConfigSubsection, ConfigText, config
from Components.Converter.Converter import Converter
from Components.Sources.Source import Source, ObsoleteSource
from Tools.Directories import resolveFilename, SCOPE_SKIN, SCOPE_FONTS, SCOPE_CURRENT_SKIN, SCOPE_CONFIG, fileExists, SCOPE_SKIN_IMAGE
from Tools.Import import my_import
from Tools.LoadPixmap import LoadPixmap
from Components.RcModel import rc_model
from Components.SystemInfo import SystemInfo
colorNames = {}
# Predefined fonts, typically used in built-in screens and for components like
# the movie list and so on.
fonts = {
"Body": ("Regular", 18, 22, 16),
"ChoiceList": ("Regular", 20, 24, 18),
}
parameters = {}
def dump(x, i=0):
print " " * i + str(x)
try:
for n in x.childNodes:
dump(n, i + 1)
except:
pass
class SkinError(Exception):
def __init__(self, message):
self.msg = message
def __str__(self):
return "{%s}: %s. Please contact the skin's author!" % (config.skin.primary_skin.value, self.msg)
dom_skins = [ ]
def addSkin(name, scope = SCOPE_SKIN):
# read the skin
filename = resolveFilename(scope, name)
if fileExists(filename):
mpath = os.path.dirname(filename) + "/"
try:
dom_skins.append((mpath, xml.etree.cElementTree.parse(filename).getroot()))
except:
print "[SKIN ERROR] error in %s" % filename
return False
else:
return True
return False
# get own skin_user_skinname.xml file, if it exists
def skin_user_skinname():
name = "skin_user_" + config.skin.primary_skin.value[:config.skin.primary_skin.value.rfind('/')] + ".xml"
filename = resolveFilename(SCOPE_CONFIG, name)
if fileExists(filename):
return name
return None
# we do our best to always select the "right" value
# skins are loaded in order of priority: skin with
# highest priority is loaded last, usually the user-provided
# skin.
# currently, loadSingleSkinData (colors, bordersets etc.)
# are applied one-after-each, in order of ascending priority.
# the dom_skin will keep all screens in descending priority,
# so the first screen found will be used.
# example: loadSkin("nemesis_greenline/skin.xml")
config.skin = ConfigSubsection()
DEFAULT_SKIN = "PLi-HD/skin.xml"
# on SD hardware, PLi-HD will not be available
if not fileExists(resolveFilename(SCOPE_SKIN, DEFAULT_SKIN)):
# in that case, fallback to Magic (which is an SD skin)
DEFAULT_SKIN = "Magic/skin.xml"
config.skin.primary_skin = ConfigText(default=DEFAULT_SKIN)
profile("LoadSkin")
res = None
name = skin_user_skinname()
if name:
res = addSkin(name, SCOPE_CONFIG)
if not name or not res:
addSkin('skin_user.xml', SCOPE_CONFIG)
# some boxes lie about their dimensions
addSkin('skin_box.xml')
# add optional discrete second infobar
addSkin('skin_second_infobar.xml')
display_skin_id = 1
addSkin('skin_display.xml')
addSkin('skin_text.xml')
addSkin('skin_subtitles.xml')
try:
if not addSkin(config.skin.primary_skin.value):
raise SkinError, "primary skin not found"
except Exception, err:
print "SKIN ERROR:", err
skin = DEFAULT_SKIN
if config.skin.primary_skin.value == skin:
skin = 'skin.xml'
print "defaulting to standard skin...", skin
config.skin.primary_skin.value = skin
addSkin(skin)
del skin
addSkin('skin_default.xml')
profile("LoadSkinDefaultDone")
#
# Convert a string into a number. Used to convert object position and size attributes into a number
# s is the input string.
# e is the parent object size to do relative calculations on parent
# size is the size of the object (e.g. width or height)
# font is a font object to calculate relative to font sizes
# Note some constructs for speeding up simple cases that are very common.
# Can do things like: 10+center-10w+4%
# To center the widget on the parent widget,
# but move forward 10 pixels and 4% of parent width
# and 10 character widths backward
# Multiplication, division and subexpressions are also allowed: 3*(e-c/2)
#
# Usage: center : center the object on parent based on parent size and object size
# e : take the parent size/width
# c : take the center point of parent size/width
# % : take given percentage of parent size/width
# w : multiply by current font width
# h : multiply by current font height
#
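# Illustrative examples (not part of the original comment). Assuming a parent
# width e=1280, an object width size=200 and the "Body" font defined above
# (character width 16):
#   parseCoordinate("center", 1280, 200)      -> (1280-200)/2 = 540
#   parseCoordinate("e-10w", 1280, 0, "Body") -> 1280 - 10*16 = 1120
#   parseCoordinate("5%+4", 1280)             -> int(1280*0.05 + 4) = 68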
def parseCoordinate(s, e, size=0, font=None):
s = s.strip()
if s == "center": # for speed, can be common case
val = (e - size)/2
elif s == '*':
return None
else:
try:
val = int(s) # for speed
except:
if 't' in s:
s = s.replace("center", str((e-size)/2.0))
if 'e' in s:
s = s.replace("e", str(e))
if 'c' in s:
s = s.replace("c", str(e/2.0))
if 'w' in s:
s = s.replace("w", "*" + str(fonts[font][3]))
if 'h' in s:
s = s.replace("h", "*" + str(fonts[font][2]))
if '%' in s:
s = s.replace("%", "*" + str(e/100.0))
try:
val = int(s) # for speed
except:
val = eval(s)
if val < 0:
return 0
return int(val) # make sure an integer value is returned
def getParentSize(object, desktop):
size = eSize()
if object:
parent = object.getParent()
# For some widgets (e.g. ScrollLabel) the skin attributes are applied to
# a child widget, instead of to the widget itself. In that case, the parent
# we have here is not the real parent, but it is the main widget.
# We have to go one level higher to get the actual parent.
# We can detect this because the 'parent' will not have a size yet
# (the main widget's size will be calculated internally, as soon as the child
# widget has parsed the skin attributes)
if parent and parent.size().isEmpty():
parent = parent.getParent()
if parent:
size = parent.size()
elif desktop:
#widget has no parent, use desktop size instead for relative coordinates
size = desktop.size()
return size
def parseValuePair(s, scale, object = None, desktop = None, size = None):
x, y = s.split(',')
parentsize = eSize()
if object and ('c' in x or 'c' in y or 'e' in x or 'e' in y or
'%' in x or '%' in y): # need parent size for ce%
parentsize = getParentSize(object, desktop)
xval = parseCoordinate(x, parentsize.width(), size and size.width() or 0)
yval = parseCoordinate(y, parentsize.height(), size and size.height() or 0)
return (xval * scale[0][0] / scale[0][1], yval * scale[1][0] / scale[1][1])
def parsePosition(s, scale, object = None, desktop = None, size = None):
(x, y) = parseValuePair(s, scale, object, desktop, size)
return ePoint(x, y)
def parseSize(s, scale, object = None, desktop = None):
(x, y) = parseValuePair(s, scale, object, desktop)
return eSize(x, y)
def parseFont(s, scale):
try:
f = fonts[s]
name = f[0]
size = f[1]
except:
name, size = s.split(';')
return gFont(name, int(size) * scale[0][0] / scale[0][1])
def parseColor(s):
if s[0] != '#':
try:
return colorNames[s]
except:
raise SkinError("color '%s' must be #aarrggbb or valid named color" % (s))
return gRGB(int(s[1:], 0x10))
def collectAttributes(skinAttributes, node, context, skin_path_prefix=None, ignore=(), filenames=frozenset(("pixmap", "pointer", "seek_pointer", "backgroundPixmap", "selectionPixmap", "sliderPixmap", "scrollbarbackgroundPixmap"))):
# walk all attributes
size = None
pos = None
font = None
for attrib, value in node.items():
if attrib not in ignore:
if attrib in filenames:
value = resolveFilename(SCOPE_CURRENT_SKIN, value, path_prefix=skin_path_prefix)
# Bit of a hack this, really. When a window has a flag (e.g. wfNoBorder)
# it needs to be set at least before the size is set, in order for the
# window dimensions to be calculated correctly in all situations.
# If wfNoBorder is applied after the size has been set, the window will fail to clear the title area.
# Similar situation for a scrollbar in a listbox; when the scrollbar setting is applied after
# the size, a scrollbar will not be shown until the selection moves for the first time
if attrib == 'size':
size = value.encode("utf-8")
elif attrib == 'position':
pos = value.encode("utf-8")
elif attrib == 'font':
font = value.encode("utf-8")
skinAttributes.append((attrib, font))
else:
skinAttributes.append((attrib, value.encode("utf-8")))
if pos is not None:
pos, size = context.parse(pos, size, font)
skinAttributes.append(('position', pos))
if size is not None:
skinAttributes.append(('size', size))
def morphRcImagePath(value):
if rc_model.rcIsDefault() is False:
if value == '/usr/share/enigma2/skin_default/rc.png' or value == '/usr/share/enigma2/skin_default/rcold.png':
value = rc_model.getRcImg()
return value
def loadPixmap(path, desktop):
option = path.find("#")
if option != -1:
path = path[:option]
ptr = LoadPixmap(morphRcImagePath(path), desktop)
if ptr is None:<|fim▁hole|> raise SkinError("pixmap file %s not found!" % (path))
return ptr
class AttributeParser:
def __init__(self, guiObject, desktop, scale=((1,1),(1,1))):
self.guiObject = guiObject
self.desktop = desktop
self.scaleTuple = scale
def applyOne(self, attrib, value):
try:
getattr(self, attrib)(value)
except AttributeError:
print "[Skin] Attribute not implemented:", attrib, "value:", value
except SkinError, ex:
print "[Skin] Error:", ex
def applyAll(self, attrs):
for attrib, value in attrs:
self.applyOne(attrib, value)
def conditional(self, value):
pass
def position(self, value):
if isinstance(value, tuple):
self.guiObject.move(ePoint(*value))
else:
self.guiObject.move(parsePosition(value, self.scaleTuple, self.guiObject, self.desktop, self.guiObject.csize()))
def size(self, value):
if isinstance(value, tuple):
self.guiObject.resize(eSize(*value))
else:
self.guiObject.resize(parseSize(value, self.scaleTuple, self.guiObject, self.desktop))
def title(self, value):
self.guiObject.setTitle(_(value))
def text(self, value):
self.guiObject.setText(_(value))
def font(self, value):
self.guiObject.setFont(parseFont(value, self.scaleTuple))
def zPosition(self, value):
self.guiObject.setZPosition(int(value))
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setPixmap(ptr)
def backgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setBackgroundPicture(ptr)
def selectionPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSelectionPicture(ptr)
def sliderPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setSliderPicture(ptr)
def scrollbarbackgroundPixmap(self, value):
ptr = loadPixmap(value, self.desktop)
self.guiObject.setScrollbarBackgroundPicture(ptr)
def alphatest(self, value):
self.guiObject.setAlphatest(
{ "on": 1,
"off": 0,
"blend": 2,
}[value])
def scale(self, value):
self.guiObject.setScale(1)
def orientation(self, value): # used by eSlider
try:
self.guiObject.setOrientation(*
{ "orVertical": (self.guiObject.orVertical, False),
"orTopToBottom": (self.guiObject.orVertical, False),
"orBottomToTop": (self.guiObject.orVertical, True),
"orHorizontal": (self.guiObject.orHorizontal, False),
"orLeftToRight": (self.guiObject.orHorizontal, False),
"orRightToLeft": (self.guiObject.orHorizontal, True),
}[value])
except KeyError:
print "orientation must be either orVertical or orHorizontal!"
def valign(self, value):
try:
self.guiObject.setVAlign(
{ "top": self.guiObject.alignTop,
"center": self.guiObject.alignCenter,
"bottom": self.guiObject.alignBottom
}[value])
except KeyError:
print "valign must be either top, center or bottom!"
def halign(self, value):
try:
self.guiObject.setHAlign(
{ "left": self.guiObject.alignLeft,
"center": self.guiObject.alignCenter,
"right": self.guiObject.alignRight,
"block": self.guiObject.alignBlock
}[value])
except KeyError:
print "halign must be either left, center, right or block!"
def textOffset(self, value):
x, y = value.split(',')
self.guiObject.setTextOffset(ePoint(int(x) * self.scaleTuple[0][0] / self.scaleTuple[0][1], int(y) * self.scaleTuple[1][0] / self.scaleTuple[1][1]))
def flags(self, value):
flags = value.split(',')
for f in flags:
try:
fv = eWindow.__dict__[f]
self.guiObject.setFlag(fv)
except KeyError:
print "illegal flag %s!" % f
def backgroundColor(self, value):
self.guiObject.setBackgroundColor(parseColor(value))
def backgroundColorSelected(self, value):
self.guiObject.setBackgroundColorSelected(parseColor(value))
def foregroundColor(self, value):
self.guiObject.setForegroundColor(parseColor(value))
def foregroundColorSelected(self, value):
self.guiObject.setForegroundColorSelected(parseColor(value))
def shadowColor(self, value):
self.guiObject.setShadowColor(parseColor(value))
def selectionDisabled(self, value):
self.guiObject.setSelectionEnable(0)
def transparent(self, value):
self.guiObject.setTransparent(int(value))
def borderColor(self, value):
self.guiObject.setBorderColor(parseColor(value))
def borderWidth(self, value):
self.guiObject.setBorderWidth(int(value))
def scrollbarMode(self, value):
self.guiObject.setScrollbarMode(getattr(self.guiObject, value))
# { "showOnDemand": self.guiObject.showOnDemand,
# "showAlways": self.guiObject.showAlways,
# "showNever": self.guiObject.showNever,
# "showLeft": self.guiObject.showLeft
# }[value])
def enableWrapAround(self, value):
self.guiObject.setWrapAround(True)
def itemHeight(self, value):
self.guiObject.setItemHeight(int(value))
def pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(0, ptr, pos)
def seek_pointer(self, value):
(name, pos) = value.split(':')
pos = parsePosition(pos, self.scaleTuple)
ptr = loadPixmap(name, self.desktop)
self.guiObject.setPointer(1, ptr, pos)
def shadowOffset(self, value):
self.guiObject.setShadowOffset(parsePosition(value, self.scaleTuple))
def noWrap(self, value):
self.guiObject.setNoWrap(1)
def applySingleAttribute(guiObject, desktop, attrib, value, scale = ((1,1),(1,1))):
# Someone still using applySingleAttribute?
AttributeParser(guiObject, desktop, scale).applyOne(attrib, value)
def applyAllAttributes(guiObject, desktop, attributes, scale):
AttributeParser(guiObject, desktop, scale).applyAll(attributes)
def loadSingleSkinData(desktop, skin, path_prefix):
"""loads skin data like colors, windowstyle etc."""
assert skin.tag == "skin", "root element in skin must be 'skin'!"
for c in skin.findall("output"):
id = c.attrib.get('id')
if id:
id = int(id)
else:
id = 0
if id == 0: # framebuffer
for res in c.findall("resolution"):
get_attr = res.attrib.get
xres = get_attr("xres")
if xres:
xres = int(xres)
else:
xres = 720
yres = get_attr("yres")
if yres:
yres = int(yres)
else:
yres = 576
bpp = get_attr("bpp")
if bpp:
bpp = int(bpp)
else:
bpp = 32
#print "Resolution:", xres,yres,bpp
from enigma import gMainDC
gMainDC.getInstance().setResolution(xres, yres)
desktop.resize(eSize(xres, yres))
if bpp != 32:
# load palette (not yet implemented)
pass
if yres >= 1080:
parameters["FileListName"] = (68,4,1000,34)
parameters["FileListIcon"] = (7,4,52,37)
parameters["FileListMultiName"] = (90,3,1000,32)
parameters["FileListMultiIcon"] = (45, 4, 30, 30)
parameters["FileListMultiLock"] = (2,0,36,36)
parameters["ChoicelistDash"] = (0,3,1000,30)
parameters["ChoicelistName"] = (68,3,1000,30)
parameters["ChoicelistIcon"] = (7,0,52,38)
parameters["PluginBrowserName"] = (180,8,38)
parameters["PluginBrowserDescr"] = (180,42,25)
parameters["PluginBrowserIcon"] = (15,8,150,60)
parameters["PluginBrowserDownloadName"] = (120,8,38)
parameters["PluginBrowserDownloadDescr"] = (120,42,25)
parameters["PluginBrowserDownloadIcon"] = (15,0,90,76)
parameters["ServiceInfo"] = (0,0,450,50)
parameters["ServiceInfoLeft"] = (0,0,450,45)
parameters["ServiceInfoRight"] = (450,0,1000,45)
parameters["SelectionListDescr"] = (45,3,1000,32)
parameters["SelectionListLock"] = (0,2,36,36)
parameters["ConfigListSeperator"] = 300
parameters["VirtualKeyboard"] = (68,68)
parameters["PartnerBoxEntryListName"] = (8,2,225,38)
parameters["PartnerBoxEntryListIP"] = (180,2,225,38)
parameters["PartnerBoxEntryListPort"] = (405,2,150,38)
parameters["PartnerBoxEntryListType"] = (615,2,150,38)
parameters["PartnerBoxTimerServicename"] = (0,0,45)
parameters["PartnerBoxTimerName"] = (0,42,30)
parameters["PartnerBoxE1TimerTime"] = (0,78,255,30)
parameters["PartnerBoxE1TimerState"] = (255,78,255,30)
parameters["PartnerBoxE2TimerTime"] = (0,78,225,30)
parameters["PartnerBoxE2TimerState"] = (225,78,225,30)
parameters["PartnerBoxE2TimerIcon"] = (1050,8,20,20)
parameters["PartnerBoxE2TimerIconRepeat"] = (1050,38,20,20)
parameters["PartnerBoxBouquetListName"] = (0,0,45)
parameters["PartnerBoxChannelListName"] = (0,0,45)
parameters["PartnerBoxChannelListTitle"] = (0,42,30)
parameters["PartnerBoxChannelListTime"] = (0,78,225,30)
parameters["HelpMenuListHlp"] = (0,0,900,42)
parameters["HelpMenuListExtHlp0"] = (0,0,900,39)
parameters["HelpMenuListExtHlp1"] = (0,42,900,30)
parameters["AboutHddSplit"] = 1
parameters["DreamexplorerName"] = (62,0,1200,38)
parameters["DreamexplorerIcon"] = (15,4,30,30)
parameters["PicturePlayerThumb"] = (30,285,45,300,30,25)
parameters["PlayListName"] = (38,2,1000,34)
parameters["PlayListIcon"] = (7,7,24,24)
parameters["SHOUTcastListItem"] = (30,27,35,96,35,33,60,32)
for skininclude in skin.findall("include"):
filename = skininclude.attrib.get("filename")
if filename:
skinfile = resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix)
if not fileExists(skinfile):
skinfile = resolveFilename(SCOPE_SKIN_IMAGE, filename, path_prefix=path_prefix)
if fileExists(skinfile):
print "[SKIN] loading include:", skinfile
loadSkin(skinfile)
for c in skin.findall("colors"):
for color in c.findall("color"):
get_attr = color.attrib.get
name = get_attr("name")
color = get_attr("value")
if name and color:
colorNames[name] = parseColor(color)
#print "Color:", name, color
else:
raise SkinError("need color and name, got %s %s" % (name, color))
for c in skin.findall("fonts"):
for font in c.findall("font"):
get_attr = font.attrib.get
filename = get_attr("filename", "<NONAME>")
name = get_attr("name", "Regular")
scale = get_attr("scale")
if scale:
scale = int(scale)
else:
scale = 100
is_replacement = get_attr("replacement") and True or False
render = get_attr("render")
if render:
render = int(render)
else:
render = 0
resolved_font = resolveFilename(SCOPE_FONTS, filename, path_prefix=path_prefix)
if not fileExists(resolved_font): #when font is not available look at current skin path
skin_path = resolveFilename(SCOPE_CURRENT_SKIN, filename)
if fileExists(skin_path):
resolved_font = skin_path
addFont(resolved_font, name, scale, is_replacement, render)
#print "Font: ", resolved_font, name, scale, is_replacement
for alias in c.findall("alias"):
get = alias.attrib.get
try:
name = get("name")
font = get("font")
size = int(get("size"))
height = int(get("height", size)) # to be calculated some day
width = int(get("width", size))
global fonts
fonts[name] = (font, size, height, width)
except Exception, ex:
print "[SKIN] bad font alias", ex
for c in skin.findall("parameters"):
for parameter in c.findall("parameter"):
get = parameter.attrib.get
try:
name = get("name")
value = get("value")
parameters[name] = "," in value and map(int, value.split(",")) or int(value)
except Exception, ex:
print "[SKIN] bad parameter", ex
for c in skin.findall("subtitles"):
from enigma import eWidget, eSubtitleWidget
scale = ((1,1),(1,1))
for substyle in c.findall("sub"):
get_attr = substyle.attrib.get
font = parseFont(get_attr("font"), scale)
col = get_attr("foregroundColor")
if col:
foregroundColor = parseColor(col)
haveColor = 1
else:
foregroundColor = gRGB(0xFFFFFF)
haveColor = 0
col = get_attr("borderColor")
if col:
borderColor = parseColor(col)
else:
borderColor = gRGB(0)
borderwidth = get_attr("borderWidth")
if borderwidth is None:
# default: use a subtitle border
borderWidth = 3
else:
borderWidth = int(borderwidth)
face = eSubtitleWidget.__dict__[get_attr("name")]
eSubtitleWidget.setFontStyle(face, font, haveColor, foregroundColor, borderColor, borderWidth)
for windowstyle in skin.findall("windowstyle"):
style = eWindowStyleSkinned()
style_id = windowstyle.attrib.get("id")
if style_id:
style_id = int(style_id)
else:
style_id = 0
# defaults
font = gFont("Regular", 20)
offset = eSize(20, 5)
for title in windowstyle.findall("title"):
get_attr = title.attrib.get
offset = parseSize(get_attr("offset"), ((1,1),(1,1)))
font = parseFont(get_attr("font"), ((1,1),(1,1)))
style.setTitleFont(font)
style.setTitleOffset(offset)
#print " ", font, offset
for borderset in windowstyle.findall("borderset"):
bsName = str(borderset.attrib.get("name"))
for pixmap in borderset.findall("pixmap"):
get_attr = pixmap.attrib.get
bpName = get_attr("pos")
filename = get_attr("filename")
if filename and bpName:
png = loadPixmap(resolveFilename(SCOPE_CURRENT_SKIN, filename, path_prefix=path_prefix), desktop)
style.setPixmap(eWindowStyleSkinned.__dict__[bsName], eWindowStyleSkinned.__dict__[bpName], png)
#print " borderset:", bpName, filename
for color in windowstyle.findall("color"):
get_attr = color.attrib.get
colorType = get_attr("name")
color = parseColor(get_attr("color"))
try:
style.setColor(eWindowStyleSkinned.__dict__["col" + colorType], color)
except:
raise SkinError("Unknown color %s" % (colorType))
#pass
#print " color:", type, color
x = eWindowStyleManager.getInstance()
x.setStyle(style_id, style)
for margin in skin.findall("margin"):
style_id = margin.attrib.get("id")
if style_id:
style_id = int(style_id)
else:
style_id = 0
r = eRect(0,0,0,0)
v = margin.attrib.get("left")
if v:
r.setLeft(int(v))
v = margin.attrib.get("top")
if v:
r.setTop(int(v))
v = margin.attrib.get("right")
if v:
r.setRight(int(v))
v = margin.attrib.get("bottom")
if v:
r.setBottom(int(v))
# the "desktop" parameter is hardcoded to the UI screen, so we must ask
# for the one that this actually applies to.
getDesktop(style_id).setMargins(r)
dom_screens = {}
def loadSkin(name, scope = SCOPE_SKIN):
# Now a utility for plugins to add skin data to the screens
global dom_screens, display_skin_id
filename = resolveFilename(scope, name)
if fileExists(filename):
path = os.path.dirname(filename) + "/"
for elem in xml.etree.cElementTree.parse(filename).getroot():
if elem.tag == 'screen':
name = elem.attrib.get('name', None)
if name:
sid = elem.attrib.get('id', None)
if sid and (sid != display_skin_id):
# not for this display
elem.clear()
continue
if name in dom_screens:
print "loadSkin: Screen already defined elsewhere:", name
elem.clear()
else:
dom_screens[name] = (elem, path)
else:
elem.clear()
else:
elem.clear()
def loadSkinData(desktop):
# Kinda hackish, but this is called once by mytest.py
global dom_skins
skins = dom_skins[:]
skins.reverse()
for (path, dom_skin) in skins:
loadSingleSkinData(desktop, dom_skin, path)
for elem in dom_skin:
if elem.tag == 'screen':
name = elem.attrib.get('name', None)
if name:
sid = elem.attrib.get('id', None)
if sid and (sid != display_skin_id):
# not for this display
elem.clear()
continue
if name in dom_screens:
# Kill old versions, save memory
dom_screens[name][0].clear()
dom_screens[name] = (elem, path)
else:
# without name, it's useless!
elem.clear()
else:
# non-screen element, no need for it any longer
elem.clear()
# no longer needed, we know where the screens are now.
del dom_skins
class additionalWidget:
pass
# Class that makes a tuple look like something else. Some plugins just assume
# that size is a string and try to parse it. This class makes that work.
class SizeTuple(tuple):
def split(self, *args):
return (str(self[0]), str(self[1]))
def strip(self, *args):
return '%s,%s' % self
def __str__(self):
return '%s,%s' % self
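# Illustrative behaviour (not part of the original source):
#   s = SizeTuple((200, 100))
#   s.split() -> ('200', '100');  str(s) -> '200,100'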
class SkinContext:
def __init__(self, parent=None, pos=None, size=None, font=None):
if parent is not None:
if pos is not None:
pos, size = parent.parse(pos, size, font)
self.x, self.y = pos
self.w, self.h = size
else:
self.x = None
self.y = None
self.w = None
self.h = None
def __str__(self):
return "Context (%s,%s)+(%s,%s) " % (self.x, self.y, self.w, self.h)
def parse(self, pos, size, font):
if pos == "fill":
pos = (self.x, self.y)
size = (self.w, self.h)
self.w = 0
self.h = 0
else:
w,h = size.split(',')
w = parseCoordinate(w, self.w, 0, font)
h = parseCoordinate(h, self.h, 0, font)
if pos == "bottom":
pos = (self.x, self.y + self.h - h)
size = (self.w, h)
self.h -= h
elif pos == "top":
pos = (self.x, self.y)
size = (self.w, h)
self.h -= h
self.y += h
elif pos == "left":
pos = (self.x, self.y)
size = (w, self.h)
self.x += w
self.w -= w
elif pos == "right":
pos = (self.x + self.w - w, self.y)
size = (w, self.h)
self.w -= w
else:
size = (w, h)
pos = pos.split(',')
pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
return (SizeTuple(pos), SizeTuple(size))
class SkinContextStack(SkinContext):
# A context that stacks things instead of aligning them
def parse(self, pos, size, font):
if pos == "fill":
pos = (self.x, self.y)
size = (self.w, self.h)
else:
w,h = size.split(',')
w = parseCoordinate(w, self.w, 0, font)
h = parseCoordinate(h, self.h, 0, font)
if pos == "bottom":
pos = (self.x, self.y + self.h - h)
size = (self.w, h)
elif pos == "top":
pos = (self.x, self.y)
size = (self.w, h)
elif pos == "left":
pos = (self.x, self.y)
size = (w, self.h)
elif pos == "right":
pos = (self.x + self.w - w, self.y)
size = (w, self.h)
else:
size = (w, h)
pos = pos.split(',')
pos = (self.x + parseCoordinate(pos[0], self.w, size[0], font), self.y + parseCoordinate(pos[1], self.h, size[1], font))
return (SizeTuple(pos), SizeTuple(size))
def readSkin(screen, skin, names, desktop):
if not isinstance(names, list):
names = [names]
# try all skins, the first existing one has priority
global dom_screens
for n in names:
myscreen, path = dom_screens.get(n, (None,None))
if myscreen is not None:
# use this name for debug output
name = n
break
else:
name = "<embedded-in-'%s'>" % screen.__class__.__name__
# otherwise try embedded skin
if myscreen is None:
myscreen = getattr(screen, "parsedSkin", None)
# try uncompiled embedded skin
if myscreen is None and getattr(screen, "skin", None):
skin = screen.skin
print "[SKIN] Parsing embedded skin", name
if (isinstance(skin, tuple)):
for s in skin:
candidate = xml.etree.cElementTree.fromstring(s)
if candidate.tag == 'screen':
sid = candidate.attrib.get('id', None)
if (not sid) or (int(sid) == display_skin_id):
myscreen = candidate
break
else:
print "[SKIN] Hey, no suitable screen!"
else:
myscreen = xml.etree.cElementTree.fromstring(skin)
if myscreen:
screen.parsedSkin = myscreen
if myscreen is None:
print "[SKIN] No skin to read..."
myscreen = screen.parsedSkin = xml.etree.cElementTree.fromstring("<screen></screen>")
screen.skinAttributes = [ ]
skin_path_prefix = getattr(screen, "skin_path", path)
context = SkinContextStack()
s = desktop.bounds()
context.x = s.left()
context.y = s.top()
context.w = s.width()
context.h = s.height()
del s
collectAttributes(screen.skinAttributes, myscreen, context, skin_path_prefix, ignore=("name",))
context = SkinContext(context, myscreen.attrib.get('position'), myscreen.attrib.get('size'))
screen.additionalWidgets = [ ]
screen.renderer = [ ]
visited_components = set()
# now walk all widgets and stuff
def process_none(widget, context):
pass
def process_widget(widget, context):
get_attr = widget.attrib.get
# ok, we either have 1:1-mapped widgets ('old style'), or 1:n-mapped
# widgets (source->renderer).
wname = get_attr('name')
wsource = get_attr('source')
if wname is None and wsource is None:
print "widget has no name and no source!"
return
if wname:
#print "Widget name=", wname
visited_components.add(wname)
# get corresponding 'gui' object
try:
attributes = screen[wname].skinAttributes = [ ]
except:
raise SkinError("component with name '" + wname + "' was not found in skin of screen '" + name + "'!")
# assert screen[wname] is not Source
collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('name',))
elif wsource:
# get corresponding source
#print "Widget source=", wsource
while True: # until we found a non-obsolete source
# parse our current "wsource", which might specify a "related screen" before the dot,
# for example to reference a parent, global or session-global screen.
scr = screen
# resolve all path components
path = wsource.split('.')
while len(path) > 1:
scr = screen.getRelatedScreen(path[0])
if scr is None:
#print wsource
#print name
raise SkinError("specified related screen '" + wsource + "' was not found in screen '" + name + "'!")
path = path[1:]
# resolve the source.
source = scr.get(path[0])
if isinstance(source, ObsoleteSource):
# however, if we found an "obsolete source", issue warning, and resolve the real source.
print "WARNING: SKIN '%s' USES OBSOLETE SOURCE '%s', USE '%s' INSTEAD!" % (name, wsource, source.new_source)
print "OBSOLETE SOURCE WILL BE REMOVED %s, PLEASE UPDATE!" % (source.removal_date)
if source.description:
print source.description
wsource = source.new_source
else:
# otherwise, use that source.
break
if source is None:
raise SkinError("source '" + wsource + "' was not found in screen '" + name + "'!")
wrender = get_attr('render')
if not wrender:
raise SkinError("you must define a renderer with render= for source '%s'" % (wsource))
for converter in widget.findall("convert"):
ctype = converter.get('type')
assert ctype, "'convert'-tag needs a 'type'-attribute"
#print "Converter:", ctype
try:
parms = converter.text.strip()
except:
parms = ""
#print "Params:", parms
converter_class = my_import('.'.join(("Components", "Converter", ctype))).__dict__.get(ctype)
c = None
for i in source.downstream_elements:
if isinstance(i, converter_class) and i.converter_arguments == parms:
c = i
if c is None:
c = converter_class(parms)
c.connect(source)
source = c
renderer_class = my_import('.'.join(("Components", "Renderer", wrender))).__dict__.get(wrender)
renderer = renderer_class() # instantiate renderer
renderer.connect(source) # connect to source
attributes = renderer.skinAttributes = [ ]
collectAttributes(attributes, widget, context, skin_path_prefix, ignore=('render', 'source'))
screen.renderer.append(renderer)
def process_applet(widget, context):
try:
codeText = widget.text.strip()
widgetType = widget.attrib.get('type')
code = compile(codeText, "skin applet", "exec")
except Exception, ex:
raise SkinError("applet failed to compile: " + str(ex))
if widgetType == "onLayoutFinish":
screen.onLayoutFinish.append(code)
else:
raise SkinError("applet type '%s' unknown!" % widgetType)
def process_elabel(widget, context):
w = additionalWidget()
w.widget = eLabel
w.skinAttributes = [ ]
collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
screen.additionalWidgets.append(w)
def process_epixmap(widget, context):
w = additionalWidget()
w.widget = ePixmap
w.skinAttributes = [ ]
collectAttributes(w.skinAttributes, widget, context, skin_path_prefix, ignore=('name',))
screen.additionalWidgets.append(w)
def process_screen(widget, context):
for w in widget.getchildren():
conditional = w.attrib.get('conditional')
if conditional and not [i for i in conditional.split(",") if i in screen.keys()]:
continue
p = processors.get(w.tag, process_none)
try:
p(w, context)
except SkinError, e:
print "[Skin] SKIN ERROR in screen '%s' widget '%s':" % (name, w.tag), e
def process_panel(widget, context):
n = widget.attrib.get('name')
if n:
try:
s = dom_screens[n]
except KeyError:
print "[SKIN] Unable to find screen '%s' referred in screen '%s'" % (n, name)
else:
process_screen(s[0], context)
layout = widget.attrib.get('layout')
if layout == 'stack':
cc = SkinContextStack
else:
cc = SkinContext
try:
c = cc(context, widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'))
except Exception, ex:
raise SkinError("Failed to create skincontext (%s,%s,%s) in %s: %s" % (widget.attrib.get('position'), widget.attrib.get('size'), widget.attrib.get('font'), context, ex) )
process_screen(widget, c)
processors = {
None: process_none,
"widget": process_widget,
"applet": process_applet,
"eLabel": process_elabel,
"ePixmap": process_epixmap,
"panel": process_panel
}
try:
context.x = 0 # reset offsets, all components are relative to screen
context.y = 0 # coordinates.
process_screen(myscreen, context)
except Exception, e:
print "[Skin] SKIN ERROR in %s:" % name, e
from Components.GUIComponent import GUIComponent
nonvisited_components = [x for x in set(screen.keys()) - visited_components if isinstance(x, GUIComponent)]
assert not nonvisited_components, "the following components in %s don't have a skin entry: %s" % (name, ', '.join(nonvisited_components))
# This may look pointless, but it unbinds 'screen' from the nested scope. A better
# solution is to avoid the nested scope above and use the context object to pass
# things around.
screen = None
visited_components = None<|fim▁end|>
| |
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>/// A point relative to the dimensions of a region.
pub enum AnchorPoint {
Center,
Bottom,
Top,
Left,
Right,
BottomLeft,
BottomRight,<|fim▁hole|>// General layering order of frames
pub enum FrameStrata {
Background,
Low,
Medium,
High,
Dialog,
Fullscreen,
FullscreenDialog,
Tooltip
}
/// Horizontal alignment of text within a widget.
pub enum JustifyH {
Left,
Center,
Right
}
/// Vertical alignment of text within a widget.
pub enum JustifyV {
Top,
Middle,
Bottom
}
pub enum Layer {
/// first layer (lowest).
Background,
/// Second layer.
Border,
/// Third layer (default).
Artwork,
/// Fourth layer.
Overlay,
/// Fifth layer (highest).
Highlight
}<|fim▁end|>
|
TopLeft,
TopRight
}
|
<|file_name|>storage_file.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the storage file CLI arguments helper."""
import argparse
import unittest
from plaso.cli import tools
from plaso.cli.helpers import storage_file
from plaso.lib import errors
from tests.cli import test_lib as cli_test_lib
class StorageFileArgumentsHelperTest(cli_test_lib.CLIToolTestCase):
"""Tests for the storage file CLI arguments helper."""
# pylint: disable=no-member,protected-access
_EXPECTED_OUTPUT = """\
usage: cli_helper.py [STORAGE_FILE]
Test argument parser.
positional arguments:
STORAGE_FILE Path to a storage file.
"""
def testAddArguments(self):
"""Tests the AddArguments function."""
argument_parser = argparse.ArgumentParser(
prog='cli_helper.py', description='Test argument parser.',
add_help=False,
formatter_class=cli_test_lib.SortedArgumentsHelpFormatter)
storage_file.StorageFileArgumentsHelper.AddArguments(argument_parser)
output = self._RunArgparseFormatHelp(argument_parser)
self.assertEqual(output, self._EXPECTED_OUTPUT)
def testParseOptions(self):
"""Tests the ParseOptions function."""
test_tool = tools.CLITool()
options = cli_test_lib.TestOptions()
options.storage_file = self._GetTestFilePath(['test.plaso'])
storage_file.StorageFileArgumentsHelper.ParseOptions(options, test_tool)<|fim▁hole|>
if __name__ == '__main__':
unittest.main()<|fim▁end|>
|
self.assertEqual(test_tool._storage_file_path, options.storage_file)
with self.assertRaises(errors.BadConfigObject):
storage_file.StorageFileArgumentsHelper.ParseOptions(options, None)
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
import v1 from './v1'
import v2 from './v2'
import v3 from './v3'
import v4 from './v4'
import ugc from './ugc'
import bos from './bos'
import eus from './eus'
import admin from './admin'
import payapi from './payapi'
import statis from './statis'
import member from './member'
import shopping from './shopping'
import promotion from './promotion'
export default app => {
// app.get('/', (req, res, next) => {
// res.redirect('/');
// });
app.use('/v1', v1);
app.use('/v2', v2);
app.use('/v3', v3);
app.use('/v4', v4);<|fim▁hole|> app.use('/eus', eus);
app.use('/admin', admin);
app.use('/payapi', payapi);
app.use('/member', member);
app.use('/statis', statis);
app.use('/shopping', shopping);
app.use('/promotion', promotion);
}<|fim▁end|>
|
app.use('/ugc', ugc);
app.use('/bos', bos);
|
<|file_name|>0016_service_position.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-22 21:06
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('statusboard', '0015_merge_20170222_2058'),<|fim▁hole|> migrations.AddField(
model_name='service',
name='position',
field=models.PositiveIntegerField(default=0),
),
]<|fim▁end|>
|
]
operations = [
|
<|file_name|>keyframes.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Keyframes: https://drafts.csswg.org/css-animations/#keyframes
#![deny(missing_docs)]
use cssparser::{AtRuleParser, Parser, QualifiedRuleParser, RuleListParser};
use cssparser::{DeclarationListParser, DeclarationParser, parse_one_rule};
use error_reporting::NullReporter;
use parser::{PARSING_MODE_DEFAULT, ParserContext, log_css_error};
use properties::{Importance, PropertyDeclaration, PropertyDeclarationBlock, PropertyId};
use properties::{PropertyDeclarationId, LonghandId, ParsedDeclaration};
use properties::LonghandIdSet;
use properties::animated_properties::TransitionProperty;
use properties::longhands::transition_timing_function::single_value::SpecifiedValue as SpecifiedTimingFunction;
use shared_lock::{SharedRwLock, SharedRwLockReadGuard, Locked, ToCssWithGuard};
use std::fmt;
use style_traits::ToCss;
use stylearc::Arc;
use stylesheets::{CssRuleType, Stylesheet, VendorPrefix};
/// A number from 0 to 1, indicating the percentage of the animation when this
/// keyframe should run.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframePercentage(pub f32);
impl ::std::cmp::Ord for KeyframePercentage {
#[inline]
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
// We know we have a number from 0 to 1, so unwrap() here is safe.
self.0.partial_cmp(&other.0).unwrap()
}
}
impl ::std::cmp::Eq for KeyframePercentage { }
impl ToCss for KeyframePercentage {
fn to_css<W>(&self, dest: &mut W) -> fmt::Result where W: fmt::Write {
write!(dest, "{}%", self.0 * 100.0)
}
}
impl KeyframePercentage {
/// Trivially constructs a new `KeyframePercentage`.
#[inline]
pub fn new(value: f32) -> KeyframePercentage {
debug_assert!(value >= 0. && value <= 1.);
KeyframePercentage(value)
}
fn parse(input: &mut Parser) -> Result<KeyframePercentage, ()> {
let percentage = if input.try(|input| input.expect_ident_matching("from")).is_ok() {
KeyframePercentage::new(0.)
} else if input.try(|input| input.expect_ident_matching("to")).is_ok() {
KeyframePercentage::new(1.)
} else {
let percentage = try!(input.expect_percentage());
if percentage >= 0. && percentage <= 1. {
KeyframePercentage::new(percentage)
} else {
return Err(());
}
};
Ok(percentage)
}
}
/// A keyframes selector is a list of percentages or from/to symbols, which are
/// converted at parse time to percentages.
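/// For example (illustrative), the selector `from, 50%` parses into the
/// percentages `0.0` and `0.5`.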
#[derive(Debug, PartialEq)]
pub struct KeyframeSelector(Vec<KeyframePercentage>);
impl KeyframeSelector {
/// Return the list of percentages this selector contains.
#[inline]
pub fn percentages(&self) -> &[KeyframePercentage] {
&self.0
}
/// A dummy public function so we can write a unit test for this.
pub fn new_for_unit_testing(percentages: Vec<KeyframePercentage>) -> KeyframeSelector {
KeyframeSelector(percentages)
}
/// Parse a keyframe selector from CSS input.
pub fn parse(input: &mut Parser) -> Result<Self, ()> {
input.parse_comma_separated(KeyframePercentage::parse)
.map(KeyframeSelector)
}
}
/// A keyframe.
#[derive(Debug)]
pub struct Keyframe {
/// The selector this keyframe was specified from.
pub selector: KeyframeSelector,
/// The declaration block that was declared inside this keyframe.
///
/// Note that `!important` rules in keyframes don't apply, but we keep this
/// `Arc` just for convenience.
pub block: Arc<Locked<PropertyDeclarationBlock>>,
}
impl ToCssWithGuard for Keyframe {
fn to_css<W>(&self, guard: &SharedRwLockReadGuard, dest: &mut W) -> fmt::Result
where W: fmt::Write {
let mut iter = self.selector.percentages().iter();
try!(iter.next().unwrap().to_css(dest));
for percentage in iter {
try!(write!(dest, ", "));
try!(percentage.to_css(dest));
}
try!(dest.write_str(" { "));
try!(self.block.read_with(guard).to_css(dest));
try!(dest.write_str(" }"));
Ok(())
}
}
impl Keyframe {
/// Parse a CSS keyframe.
pub fn parse(css: &str, parent_stylesheet: &Stylesheet)
-> Result<Arc<Locked<Self>>, ()> {
let error_reporter = NullReporter;
let context = ParserContext::new(parent_stylesheet.origin,
&parent_stylesheet.url_data,
&error_reporter,
Some(CssRuleType::Keyframe),
PARSING_MODE_DEFAULT,
parent_stylesheet.quirks_mode);
let mut input = Parser::new(css);
let mut rule_parser = KeyframeListParser {
context: &context,
shared_lock: &parent_stylesheet.shared_lock,
};
parse_one_rule(&mut input, &mut rule_parser)
}
}
/// A keyframes step value. This can be a synthesised keyframes animation, that
/// is, one autogenerated from the current computed values, or a list of
/// declarations to apply.
///
/// TODO: Find a better name for this?
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub enum KeyframesStepValue {
/// A step formed by a declaration block specified by the CSS.
Declarations {
/// The declaration block per se.
#[cfg_attr(feature = "servo", ignore_heap_size_of = "Arc")]
block: Arc<Locked<PropertyDeclarationBlock>>
},
/// A synthetic step computed from the current computed values at the time
/// of the animation.
ComputedValues,
}
/// A single step from a keyframe animation.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesStep {
/// The percentage of the animation duration when this step starts.
pub start_percentage: KeyframePercentage,
/// Declarations that will determine the final style during the step, or
/// `ComputedValues` if this is an autogenerated step.
pub value: KeyframesStepValue,
/// Whether an animation-timing-function declaration exists in the list of
/// declarations.
///
/// This is used to know when to override the keyframe animation style.
pub declared_timing_function: bool,
}
impl KeyframesStep {
#[inline]
fn new(percentage: KeyframePercentage,
value: KeyframesStepValue,
guard: &SharedRwLockReadGuard) -> Self {
let declared_timing_function = match value {
KeyframesStepValue::Declarations { ref block } => {
block.read_with(guard).declarations().iter().any(|&(ref prop_decl, _)| {
match *prop_decl {
PropertyDeclaration::AnimationTimingFunction(..) => true,
_ => false,
}
})
}
_ => false,
};
KeyframesStep {
start_percentage: percentage,
value: value,
declared_timing_function: declared_timing_function,
}
}
/// Return specified TransitionTimingFunction if this KeyframesSteps has 'animation-timing-function'.
pub fn get_animation_timing_function(&self, guard: &SharedRwLockReadGuard)
-> Option<SpecifiedTimingFunction> {
if !self.declared_timing_function {
return None;
}
match self.value {
KeyframesStepValue::Declarations { ref block } => {
let guard = block.read_with(guard);
let &(ref declaration, _) =
guard.get(PropertyDeclarationId::Longhand(LonghandId::AnimationTimingFunction)).unwrap();
match *declaration {
PropertyDeclaration::AnimationTimingFunction(ref value) => {
// Use the first value.
Some(value.0[0])
},
PropertyDeclaration::CSSWideKeyword(..) => None,
PropertyDeclaration::WithVariables(..) => None,
_ => panic!(),
}
},
KeyframesStepValue::ComputedValues => {
panic!("Shouldn't happen to set animation-timing-function in missing keyframes")
},
}
}
}
/// This structure represents a list of animation steps computed from the list
/// of keyframes, in order.
///
/// It only takes into account animable properties.
#[derive(Debug)]
#[cfg_attr(feature = "servo", derive(HeapSizeOf))]
pub struct KeyframesAnimation {
/// The different steps of the animation.
pub steps: Vec<KeyframesStep>,
/// The properties that change in this animation.
pub properties_changed: Vec<TransitionProperty>,
/// Vendor prefix type the @keyframes has.
pub vendor_prefix: Option<VendorPrefix>,
}
/// Get all the animated properties in a keyframes animation.
fn get_animated_properties(keyframes: &[Arc<Locked<Keyframe>>], guard: &SharedRwLockReadGuard)
-> Vec<TransitionProperty> {
let mut ret = vec![];
let mut seen = LonghandIdSet::new();
// NB: declarations are already deduplicated, so we don't have to check for
// it here.
for keyframe in keyframes {
let keyframe = keyframe.read_with(&guard);
let block = keyframe.block.read_with(guard);
for &(ref declaration, importance) in block.declarations().iter() {
assert!(!importance.important());
if let Some(property) = TransitionProperty::from_declaration(declaration) {
if !seen.has_transition_property_bit(&property) {
seen.set_transition_property_bit(&property);
ret.push(property);
}
}
}
}
ret
}
impl KeyframesAnimation {
/// Create a keyframes animation from a given list of keyframes.
///
/// This will return a keyframe animation with empty steps and
/// properties_changed if the list of keyframes is empty, or there are no
/// animated properties obtained from the keyframes.
///
/// Otherwise, this will compute and sort the steps used for the animation,
/// and return the animation object.
pub fn from_keyframes(keyframes: &[Arc<Locked<Keyframe>>],
vendor_prefix: Option<VendorPrefix>,
guard: &SharedRwLockReadGuard)
-> Self {
let mut result = KeyframesAnimation {
steps: vec![],
properties_changed: vec![],
vendor_prefix: vendor_prefix,
};
if keyframes.is_empty() {
return result;
}
result.properties_changed = get_animated_properties(keyframes, guard);
if result.properties_changed.is_empty() {
return result;
}
for keyframe in keyframes {
let keyframe = keyframe.read_with(&guard);
for percentage in keyframe.selector.0.iter() {
result.steps.push(KeyframesStep::new(*percentage, KeyframesStepValue::Declarations {
block: keyframe.block.clone(),
}, guard));
}
}
// Sort by the start percentage, so we can easily find a frame.
result.steps.sort_by_key(|step| step.start_percentage);
// Prepend autogenerated keyframes if appropriate.
if result.steps[0].start_percentage.0 != 0. {
result.steps.insert(0, KeyframesStep::new(KeyframePercentage::new(0.),
KeyframesStepValue::ComputedValues,
guard));
}
if result.steps.last().unwrap().start_percentage.0 != 1. {
result.steps.push(KeyframesStep::new(KeyframePercentage::new(1.),
KeyframesStepValue::ComputedValues,
guard));
}
result
}
}
/// Parses a keyframes list, like:
/// 0%, 50% {
/// width: 50%;
/// }
///
/// 40%, 60%, 100% {
/// width: 100%;
/// }
struct KeyframeListParser<'a> {
context: &'a ParserContext<'a>,
shared_lock: &'a SharedRwLock,
}
/// Parses a keyframe list from CSS input.
pub fn parse_keyframe_list(context: &ParserContext, input: &mut Parser, shared_lock: &SharedRwLock)
-> Vec<Arc<Locked<Keyframe>>> {
RuleListParser::new_for_nested_rule(input, KeyframeListParser {
context: context,
shared_lock: shared_lock,
}).filter_map(Result::ok).collect()
}
enum Void {}
impl<'a> AtRuleParser for KeyframeListParser<'a> {
type Prelude = Void;
type AtRule = Arc<Locked<Keyframe>>;
}
impl<'a> QualifiedRuleParser for KeyframeListParser<'a> {
type Prelude = KeyframeSelector;
type QualifiedRule = Arc<Locked<Keyframe>>;
fn parse_prelude(&mut self, input: &mut Parser) -> Result<Self::Prelude, ()> {
let start = input.position();
match KeyframeSelector::parse(input) {
Ok(sel) => Ok(sel),
Err(()) => {
let message = format!("Invalid keyframe rule: '{}'", input.slice_from(start));
log_css_error(input, start, &message, self.context);
Err(())
}<|fim▁hole|>
fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser)
-> Result<Self::QualifiedRule, ()> {
let context = ParserContext::new_with_rule_type(self.context, Some(CssRuleType::Keyframe));
let parser = KeyframeDeclarationParser {
context: &context,
};
let mut iter = DeclarationListParser::new(input, parser);
let mut block = PropertyDeclarationBlock::new();
while let Some(declaration) = iter.next() {
match declaration {
Ok(parsed) => parsed.expand_push_into(&mut block, Importance::Normal),
Err(range) => {
let pos = range.start;
let message = format!("Unsupported keyframe property declaration: '{}'",
iter.input.slice(range));
log_css_error(iter.input, pos, &*message, &context);
}
}
// `parse_important` is not called here, `!important` is not allowed in keyframe blocks.
}
Ok(Arc::new(self.shared_lock.wrap(Keyframe {
selector: prelude,
block: Arc::new(self.shared_lock.wrap(block)),
})))
}
}
struct KeyframeDeclarationParser<'a, 'b: 'a> {
context: &'a ParserContext<'b>,
}
/// Default methods reject all at rules.
impl<'a, 'b> AtRuleParser for KeyframeDeclarationParser<'a, 'b> {
type Prelude = ();
type AtRule = ParsedDeclaration;
}
impl<'a, 'b> DeclarationParser for KeyframeDeclarationParser<'a, 'b> {
type Declaration = ParsedDeclaration;
fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<ParsedDeclaration, ()> {
let id = try!(PropertyId::parse(name.into()));
match ParsedDeclaration::parse(id, self.context, input) {
Ok(parsed) => {
// In case there is still unparsed text in the declaration, we should roll back.
if !input.is_exhausted() {
Err(())
} else {
Ok(parsed)
}
}
Err(_) => Err(())
}
}
}<|fim▁end|>
|
}
}
|
<|file_name|>process.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
delineator = "//"
hashtag = "#"
# generate poems from a file
# out: list of poem lines
def generate_poems(filename):
g = []
# get to the first poem in the file
with open(filename, 'r') as f:
for line in f:
line = line.rstrip()
if line.startswith( delineator ) and g:
yield g
g = []
if line:
g.append(line)
yield g
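# Illustrative usage (hypothetical file name "poems.txt"; each poem starts with
# a "// title" line, optionally followed by "#tag" lines and content lines):
#   for lines in generate_poems("poems.txt"):
#       poem = to_dictionary(lines)   # defined below
#       print(poem['title'], poem['tags'])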
# convert a list of strings
# into a poem dictionary
def to_dictionary(poem_lines):
d = {}<|fim▁hole|> d['content'] = []
d['tags'] = []
tags = []
for line in poem_lines:
if line.startswith( delineator ):
d['title'] = line.lstrip( delineator ).strip()
elif line.startswith( hashtag ):
tags.append(line)
else:
d['content'].append(line) # do not strip to preserve indentation
for line in tags:
for tag in \
(t.strip() for t in line.split( hashtag ) if t):
d['tags'].append(tag)
return d<|fim▁end|>
| |
<|file_name|>ast.py<|end_file_name|><|fim▁begin|># mako/ast.py
# Copyright 2006-2020 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""utilities for analyzing expressions and blocks of Python
code, as well as generating Python from AST nodes"""
import re
from mako import compat
from mako import exceptions
from mako import pyparser
class PythonCode(object):
"""represents information about a string containing Python code"""
def __init__(self, code, **exception_kwargs):
self.code = code
# represents all identifiers which are assigned to at some point in
# the code
self.declared_identifiers = set()
# represents all identifiers which are referenced before their
# assignment, if any
self.undeclared_identifiers = set()
# note that an identifier can be in both the undeclared and declared
# lists.
# using AST to parse instead of using code.co_varnames,
# code.co_names has several advantages:
# - we can locate an identifier as "undeclared" even if
# it's declared later in the same block of code
# - AST is less likely to break with version changes
# (for example, the behavior of co_names changed a little bit
# in python version 2.5)
if isinstance(code, compat.string_types):
expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
else:
expr = code
f = pyparser.FindIdentifiers(self, **exception_kwargs)
f.visit(expr)
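# Illustrative example (not part of the original module): for the code
# "y = x + 1", declared_identifiers ends up as {'y'} and
# undeclared_identifiers as {'x'}.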
class ArgumentList(object):
"""parses a fragment of code as a comma-separated list of expressions"""
def __init__(self, code, **exception_kwargs):
self.codeargs = []
self.args = []
self.declared_identifiers = set()
self.undeclared_identifiers = set()
if isinstance(code, compat.string_types):
if re.match(r"\S", code) and not re.match(r",\s*$", code):
# if there's text and no trailing comma, ensure it's parsed
# as a tuple by adding a trailing comma
code += ","
expr = pyparser.parse(code, "exec", **exception_kwargs)
else:
expr = code
f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)<|fim▁hole|>
"""extends PythonCode to provide identifier lookups in partial control
statements
e.g.::
for x in 5:
elif y==9:
except (MyException, e):
"""
def __init__(self, code, **exception_kwargs):
m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
if not m:
raise exceptions.CompileException(
"Fragment '%s' is not a partial control statement" % code,
**exception_kwargs
)
if m.group(3):
code = code[: m.start(3)]
(keyword, expr) = m.group(1, 2)
if keyword in ["for", "if", "while"]:
code = code + "pass"
elif keyword == "try":
code = code + "pass\nexcept:pass"
elif keyword == "elif" or keyword == "else":
code = "if False:pass\n" + code + "pass"
elif keyword == "except":
code = "try:pass\n" + code + "pass"
elif keyword == "with":
code = code + "pass"
else:
raise exceptions.CompileException(
"Unsupported control keyword: '%s'" % keyword,
**exception_kwargs
)
super(PythonFragment, self).__init__(code, **exception_kwargs)
class FunctionDecl(object):
"""function declaration"""
def __init__(self, code, allow_kwargs=True, **exception_kwargs):
self.code = code
expr = pyparser.parse(code, "exec", **exception_kwargs)
f = pyparser.ParseFunc(self, **exception_kwargs)
f.visit(expr)
if not hasattr(self, "funcname"):
raise exceptions.CompileException(
"Code '%s' is not a function declaration" % code,
**exception_kwargs
)
if not allow_kwargs and self.kwargs:
raise exceptions.CompileException(
"'**%s' keyword argument not allowed here"
% self.kwargnames[-1],
**exception_kwargs
)
def get_argument_expressions(self, as_call=False):
"""Return the argument declarations of this FunctionDecl as a printable
list.
By default the return value is appropriate for writing in a ``def``;
set `as_call` to true to build arguments to be passed to the function
instead (assuming locals with the same names as the arguments exist).
"""
namedecls = []
# Build in reverse order, since defaults and slurpy args come last
argnames = self.argnames[::-1]
kwargnames = self.kwargnames[::-1]
defaults = self.defaults[::-1]
kwdefaults = self.kwdefaults[::-1]
# Named arguments
if self.kwargs:
namedecls.append("**" + kwargnames.pop(0))
for name in kwargnames:
# Keyword-only arguments must always be used by name, so even if
# this is a call, print out `foo=foo`
if as_call:
namedecls.append("%s=%s" % (name, name))
elif kwdefaults:
default = kwdefaults.pop(0)
if default is None:
# The AST always gives kwargs a default, since you can do
# `def foo(*, a=1, b, c=3)`
namedecls.append(name)
else:
namedecls.append(
"%s=%s"
% (name, pyparser.ExpressionGenerator(default).value())
)
else:
namedecls.append(name)
# Positional arguments
if self.varargs:
namedecls.append("*" + argnames.pop(0))
for name in argnames:
if as_call or not defaults:
namedecls.append(name)
else:
default = defaults.pop(0)
namedecls.append(
"%s=%s"
% (name, pyparser.ExpressionGenerator(default).value())
)
namedecls.reverse()
return namedecls
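# Illustrative example for get_argument_expressions() above (not part of the
# original module): for "def foo(a, b=1, **kw)" this returns
# ['a', 'b=1', '**kw'] by default and ['a', 'b', '**kw'] when as_call=True.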
@property
def allargnames(self):
return tuple(self.argnames) + tuple(self.kwargnames)
class FunctionArgs(FunctionDecl):
"""the argument portion of a function declaration"""
def __init__(self, code, **kwargs):
super(FunctionArgs, self).__init__(
"def ANON(%s):pass" % code, **kwargs
)<|fim▁end|>
|
f.visit(expr)
class PythonFragment(PythonCode):
|
<|file_name|>items.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html<|fim▁hole|>class TiebaItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
pass
class ThreadItem(scrapy.Item):
url = scrapy.Field()
title = scrapy.Field()
preview = scrapy.Field()
author = scrapy.Field()
tieba = scrapy.Field()
date = scrapy.Field()
keywords = scrapy.Field()
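# Hypothetical usage sketch (the spider callback and selectors are assumptions, not part
# of this project): a crawler could populate a ThreadItem roughly like
#   item = ThreadItem()
#   item['url'] = response.url
#   item['title'] = response.css('title::text').extract_first()
#   yield item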
class NoneItem(scrapy.Item):
url = scrapy.Field()
title = scrapy.Field()
preview = scrapy.Field()
author = scrapy.Field()
tieba = scrapy.Field()
date = scrapy.Field()
keywords = scrapy.Field()<|fim▁end|>
|
import scrapy
|
<|file_name|>GlobalLocation.java<|end_file_name|><|fim▁begin|>package jqian.sootex.location;
import soot.SootField;
import soot.Type;
/** Model a static class field */
public class GlobalLocation extends Location{
protected final SootField _field;
GlobalLocation(SootField field){
this._field=field;
}
<|fim▁hole|>
public String toString(){
return _field.getDeclaringClass().getShortName()+"."+_field.getName();
}
public Type getType(){
return _field.getType();
}
}<|fim▁end|>
|
public SootField getSootField(){
return _field;
}
|
<|file_name|>create-module.js<|end_file_name|><|fim▁begin|>'use strict'
let React = require('react'),
ReactDOM = require('react-dom'),
$ = require('jquery'),
d3 = require('d3'),
storage = window.localStorage,
ProgressBar = require('react-progress-bar-plus'),
CheckOptionBox = require('../components/check-option-box'),
CandleStickStockScaleChartWithVolumeHistogramV3 = require('../components/candle-stick-stock'),
alert = require('../components/alert'),
PageHeader = require('../components/page-header');
import { Button, Input, Modal, Grid, Row, Col, Glyphicon, Panel } from 'react-bootstrap';
let parseDate = d3.time.format("%Y-%m-%d").parse;
let formatDate = d3.time.format("%Y-%m-%d");
// Filter options area
const FilterOptionsBox = React.createClass({
    // Click handler for the "filter stocks" button
handleFilterClick: function handleFilterClick(event) {
this.setState({
percent: 0,
autoIncrement: true,
intervalTime: 100
});
$.ajax({
url: this.props.filterUrl,
dataType: 'json',
data: {
dealDate: this.state.dealDate,
stockId: this.state.stockId,
rsi: this.state.rsi,
top: this.state.top,
priceScale: this.state.priceScale,
crossStar: this.state.crossStar,
crossStarCount: this.state.crossStarCount,
inconUpPriceScale: this.state.inconUpPriceScale
},
cache: false,
success: function(data) {
this.setState({'filteredData': data});
this.props.onFilterClick(data);
this.setState({
percent: -1
});
}.bind(this),
error: function(xhr, status, err) {
console.error(this.props.url, status, err.toString());
this.setState({
percent: -1
});
}.bind(this)
});
},
    // Click handler for the "save stock model" button
handleSaveModuleClick: function handleSaveModuleClick(event) {
this.setState({
percent: 0,
autoIncrement: true,
intervalTime: 100
});
alert.clearMessage();
if (storage.getItem('user.token')) {
$.ajax({
url: this.props.saveModuleUrl,
type: 'POST',
dataType: 'json',
headers: {authorization: storage.getItem('user.token')},
data: {
moduleId: this.state.moduleId,
options: JSON.stringify({
dealDate: this.state.dealDate,
stockId: this.state.stockId,
rsi: this.state.rsi,
top: this.state.top,
priceScale: this.state.priceScale,
crossStar: this.state.crossStar,
crossStarCount: this.state.crossStarCount,
inconUpPriceScale: this.state.inconUpPriceScale
}
)},
cache: false,
success: function(data) {
if (data.ok) {
alert.setMessage('info', '保存成功');
this.setState({moduleId: data.result})
} else {
alert.setMessage('error', data.message);
}
this.setState({
percent: -1
});
}.bind(this),
error: function(xhr, status, err) {
console.error(this.props.url, status, err.toString());
this.setState({
percent: -1
});
}.bind(this)
});
} else {
this.setState({
percent: -1
});
alert.setMessage('error', '请先登录');
}
},
    // Click handler for the "mock buy" button
handleMockBuyClick: function handleMockBuyClick(event) {
this.setState({
percent: 0,
autoIncrement: true,
intervalTime: 100
});
$.ajax({
url: this.props.mockBuyUrl,
dataType: 'json',
type: 'POST',
data: {jsonData: JSON.stringify(this.state)},
cache: false,
success: function(data) {
console.log(data);
let diffPercent = (Math.round(data.diff / data.buyAmount * 10000) / 100);
this.setState({
showBuyResult: true,
buyAmount: data.buyAmount,
holdDay: data.holdDay,
saleAmount: data.saleAmount,
diff: data.diff,
diffPercent: diffPercent,
percent: -1
});
}.bind(this),
error: function(xhr, status, err) {
console.error(this.props.url, status, err.toString());
this.setState({
percent: -1
});
}.bind(this)
});
},
handleDealDateChange: function(data) {
this.setState({dealDate: data});
},
handleStockIdChange: function(data) {
this.setState({stockId: data});
},
handleTopChange: function(data) {
this.setState({top: data});
},
handleRSIChange: function(data) {
this.setState({rsi: data});
},
handleHoldDateChange: function(data) {
this.setState({holdDay: data});
},
handlePriceScaleChange: function(data) {
this.setState({priceScale: data});
},
handleCrossStarChange: function(data) {
this.setState({crossStar: data});
},
handleCrossStarCountChange: function(data) {
this.setState({crossStarCount: data});
},
handleInconUpPriceScaleChange: function(data) {
this.setState({inconUpPriceScale: data});
},
componentDidMount() {
if (this.state.moduleId) {
this.setState({
percent: 0,
autoIncrement: true,
intervalTime: 100
});
$.ajax({
url: this.props.getModuleUrl + this.state.moduleId,
dataType: 'json',
headers: {authorization: storage.getItem('user.token')},
cache: false,
success: function(result) {
if (result.ok) {
if (this.isMounted()) {
this.setState(result.data.options);
}
} else {
alert.setMessage('error', result.message);
}
this.setState({
percent: -1,
initDone: true
});
}.bind(this),
error: function(xhr, status, err) {
console.error(this.props.url, status, err.toString());
this.setState({
percent: -1,
initDone: true
});
}.bind(this)
});
} else {
this.setState({initDone: true});
}
},
getInitialState: function() {
return {
initDone: false,
moduleId: this.props.moduleId,
showBuyResult: false,
buyAmount: 0,
holdDay: 0,
saleAmount: 0,
diff: 0,
percent: -1,
autoIncrement: false,
intervalTime: 200
};
},
close() {
this.setState({ showBuyResult: false });
},
open() {
this.setState({ showBuyResult: true });
},
render: function() {
if (this.state.initDone) {
return (
<Grid>
<ProgressBar percent={this.state.percent}
autoIncrement={this.state.autoIncrement}
intervalTime={this.state.intervalTime} />
<alert.Alert />
<PageHeader.PageHeader title='创建模型' />
<Row className="show-grid">
<Col lg={12}>
<h4>股票筛选</h4>
</Col>
</Row>
<Panel collapsible defaultExpanded header="基础设置">
<CheckOptionBox label="指定交易日期" holder="请输入交易日期"
defaultChecked={false}
defaultValue={this.state.dealDate}
help="如果不输入则默认为当前日期。样例:2015-01-01"
onContentChange={this.handleDealDateChange} />
<CheckOptionBox label="指定股票代码" holder="请输入股票代码"
defaultChecked={false}
defaultValue={this.state.stockId}
help="样例:000001"
onContentChange={this.handleStockIdChange} />
<CheckOptionBox label="龙头股"
holder="请输入日期范围、涨幅和取板块头X个股票"
defaultChecked={false}
defaultValue={this.state.top}
help="样例:2015-12-01,2015-12-31,7,3"
onContentChange={this.handleTopChange} />
</Panel>
<Panel collapsible defaultExpanded header="涨跌相关">
<CheckOptionBox label="连续涨跌规律"
holder="请输入连续涨跌规律"
defaultChecked={false}
defaultValue={this.state.priceScale}
help="样例:2015-12-01,2015-12-31,7,7,-7(解释:从2015-12-01日到2015-12-31日,其中有一天涨幅超过7%,随后第二天涨幅超过7%,随后第三天[跌]幅超过7%)"
onContentChange={this.handlePriceScaleChange} />
<CheckOptionBox label="间断上涨规律"
holder="请输入间断上涨规律。"
defaultChecked={false}
defaultValue={this.state.inconUpPriceScale}
help="样例:2015-12-01,2015-12-31,2,7(解释:从2015-12-01日到2015-12-31日,出现2次上涨幅度在7%以上的股票)"
onContentChange={this.handleInconUpPriceScaleChange} />
</Panel>
<Panel collapsible defaultExpanded header="十字星相关">
<CheckOptionBox label="十字星定义" holder="请输入十字星定义"
defaultChecked={false}
defaultValue={this.state.crossStar}
help="样例:2015-11-01,2015-11-30,0.5,1,1(解释:从2015-12-01日到2015-12-31日,开盘价和收盘价之差在0.5%以内,上影线在1%以上,下影线在1%以上)"
onContentChange={this.handleCrossStarChange} />
<CheckOptionBox label="间断出现十字星数量" holder="请输入间断十字星数量"
defaultChecked={false}
defaultValue={this.state.crossStarCount}
help="样例:3(解释:大于等于3个十字星,不输入则默认为1)"
onContentChange={this.handleCrossStarCountChange} />
</Panel>
<Panel collapsible defaultExpanded header="指数相关">
<CheckOptionBox label="RSI1" holder="请输入范围"
defaultChecked={false}
defaultValue={this.state.rsi}
help="样例:>80 or <=20"
onContentChange={this.handleRSIChange} />
</Panel>
<Button bsStyle="default" onClick={this.handleFilterClick}>
<Glyphicon glyph="filter" /> 过滤股票
</Button>
<Button bsStyle="danger" onClick={this.handleSaveModuleClick} style={{marginLeft: 10}}>
<Glyphicon glyph="save-file" /> 保存模型
</Button>
<Row className="show-grid">
<Col lg={12}>
<hr />
<h4>模拟买入筛选股票</h4>
</Col>
</Row>
<CheckOptionBox label="持有天数" holder="请输入持有天数(如果不输入则默认为7天)。样例:7"
defaultChecked={false}
onContentChange={this.handleHoldDateChange} />
<Button bsStyle="danger" onClick={this.handleMockBuyClick}>
<Glyphicon glyph="yen" /> 模拟买入
</Button>
<hr />
<Modal show={this.state.showBuyResult} onHide={this.close}>
<Modal.Header closeButton>
<Modal.Title>模拟买入结果</Modal.Title>
</Modal.Header>
<Modal.Body>
<p>您的成绩如下:</p>
<p>每股各买入100股,总买入金额为:{this.state.buyAmount}元</p>
<p>持有{this.state.holdDay}天后卖出,总卖出金额为:{this.state.saleAmount}元</p>
<p>结果为:{this.state.diff}元 / {this.state.diffPercent}%</p>
</Modal.Body>
<Modal.Footer>
<Button onClick={this.close}>关闭</Button>
</Modal.Footer>
</Modal>
</Grid>
);
}
return (
<Grid>
<ProgressBar percent={this.state.percent}
autoIncrement={this.state.autoIncrement}
intervalTime={this.state.intervalTime} />
<alert.Alert />
</Grid>
);
}
});
// Stock list
import {Table, Column, Cell} from 'fixed-data-table';
const DateCell = ({rowIndex, data, col}) => (
<Cell>
{data[rowIndex][col]}
</Cell>
);
<|fim▁hole|>const TextCell = ({rowIndex, data, col}) => (
<Cell>
{data[rowIndex][col]}
</Cell>
);
const StockTable = React.createClass({
handleNextTransDayChange: function(data) {
this.setState({nextTransDay: data});
},
getLastTransData: function(needShowMockBuyResult) {
if (this.state.selectedStockId) {
let nextTransDate = new Date();
            // Check whether a number of following days was specified; if not, default to today
if (this.state.nextTransDay) {
nextTransDate = new Date((new Date(parseDate(this.state.selectedDealDate))).getTime() +
this.state.nextTransDay * 24 * 60 * 60 * 1000);
}
this.setState({
percent: 0,
autoIncrement: true,
intervalTime: 100
});
$.ajax({
url: "/api/stock/transactions",
dataType: 'json',
data: {
dealDate: formatDate(nextTransDate),
stockId: this.state.selectedStockId
},
cache: false,
success: function(data) {
data.forEach((d, i) => {
d.date = new Date(parseDate(d.date).getTime());
d.open = +d.open;
d.high = +d.high;
d.low = +d.low;
d.close = +d.close;
                        d.volume = (+d.volume) / 100; // convert shares to lots (1 lot = 100 shares)
// console.log(d);
});
/* change the type from hybrid to svg to compare the performance between svg and canvas */
ReactDOM.render(
<CandleStickStockScaleChartWithVolumeHistogramV3 data={data} type="hybrid" />,
document.getElementById("chart"));
if (needShowMockBuyResult) {
console.log(data[data.length - 1]);
console.log(this.state.selectedClose);
let buyAmount = Math.round(this.state.selectedClose * 100);
let saleAmount = data[data.length - 1].close * 100;
let diff = saleAmount - buyAmount;
let diffPercent = (Math.round(diff / buyAmount * 10000) / 100);
let holdDay = Math.ceil((nextTransDate - parseDate(this.state.selectedDealDate)) /
(1000 * 3600 * 24));
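                        // Worked example: buy at close 10.00 -> buyAmount 1000; if the latest
                        // close is 10.50 -> saleAmount 1050, diff 50, diffPercent 5 (i.e. +5%).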
this.setState({
showBuyResult: true,
buyAmount: buyAmount,
holdDay: holdDay,
saleAmount: saleAmount,
diff: diff,
diffPercent: diffPercent,
percent: -1
});
}
}.bind(this),
error: function(xhr, status, err) {
console.error("/api/stock/transactions", status, err.toString());
this.setState({
percent: -1
});
}.bind(this)
});
}
},
handleMockBuyClick: function() {
this.getLastTransData(true);
},
handleGetLastTransDataClick: function() {
this.getLastTransData(false);
},
getInitialState: function() {
return {
selectedStockId: null, selectedStockName: "", selectedDealDate: null,
showBuyResult: false, buyAmount: 0, holdDay: 0, saleAmount: 0, diff: 0
};
},
close() {
this.setState({ showBuyResult: false });
},
open() {
this.setState({ showBuyResult: true });
},
render: function() {
let self = this;
return (
<Grid>
<ProgressBar percent={this.state.percent}
autoIncrement={this.state.autoIncrement}
intervalTime={this.state.intervalTime} />
<Row>
<Col lg={12}>
<Table
rowHeight={50}
rowsCount={this.props.data.length}
width={1000}
height={400}
headerHeight={50}
rowClassNameGetter={function(rowIndex) { return ''; }}
onRowClick={
function(e, rowIndex) {
self.setState({
selectedStockId: self.props.data[rowIndex].stock_id,
selectedStockName: self.props.data[rowIndex].stock_name,
selectedDealDate: self.props.data[rowIndex].date,
selectedClose: self.props.data[rowIndex].close,
percent: 0,
autoIncrement: true,
intervalTime: 100
});
$.ajax({
url: "/api/stock/transactions",
dataType: 'json',
data: {
dealDate: self.props.data[rowIndex].date,
stockId: self.props.data[rowIndex].stock_id
},
cache: false,
success: function(data) {
data.forEach((d, i) => {
d.date = new Date(parseDate(d.date).getTime());
d.open = +d.open;
d.high = +d.high;
d.low = +d.low;
d.close = +d.close;
                                    d.volume = (+d.volume) / 100; // convert shares to lots (1 lot = 100 shares)
// console.log(d);
});
console.log(data);
/* change the type from hybrid to svg to compare the performance between svg and canvas */
ReactDOM.render(
<CandleStickStockScaleChartWithVolumeHistogramV3 data={data} type="hybrid" />,
document.getElementById("chart"));
self.setState({
percent: -1
});
}.bind(this),
error: function(xhr, status, err) {
console.error("/api/stock/transactions", status, err.toString());
self.setState({
percent: -1
});
}.bind(this)
});
}
}
{...this.props}>
<Column
header={<Cell>股票代码</Cell>}
cell={<TextCell data={this.props.data} col="stock_id" />}
width={100}
/>
<Column
header={<Cell>股票名称</Cell>}
cell={<TextCell data={this.props.data} col="stock_name" />}
width={100}
/>
<Column
header={<Cell>所属行业</Cell>}
cell={<TextCell data={this.props.data} col="industry" />}
width={100}
/>
<Column
header={<Cell>统计最后日期</Cell>}
cell={<DateCell data={this.props.data} col="date" />}
width={140}
/>
<Column
header={<Cell>收盘价</Cell>}
cell={<TextCell data={this.props.data} col="close" />}
width={100}
/>
<Column
header={<Cell>RSI1</Cell>}
cell={<TextCell data={this.props.data} col="rsi1" />}
width={100}
/>
<Column
header={<Cell>RSI2</Cell>}
cell={<TextCell data={this.props.data} col="rsi2" />}
width={100}
/>
<Column
header={<Cell>RSI3</Cell>}
cell={<TextCell data={this.props.data} col="rsi3" />}
width={100}
/>
</Table>
<small>* 点击股票可以查看该股票的K线图</small>
</Col>
</Row>
<Row>
<Col lg={12}>
<h3>{this.state.selectedStockName}</h3>
<div id="chart" ></div>
<hr />
<CheckOptionBox label="查看随后X天走势"
holder="请输入想要看的随后走势天数(如果不输入则默认为到当前日期)。样例:7"
defaultChecked={false}
onContentChange={this.handleNextTransDayChange} />
<Button onClick={this.handleGetLastTransDataClick}>
<Glyphicon glyph="search" /> 查看随后走势
</Button>
<Button bsStyle="danger" onClick={this.handleMockBuyClick} style={{marginLeft: 10}}>
<Glyphicon glyph="yen" /> 查看随后走势并模拟买入
</Button>
</Col>
</Row>
<Modal show={this.state.showBuyResult} onHide={this.close}>
<Modal.Header closeButton>
<Modal.Title>模拟买入结果</Modal.Title>
</Modal.Header>
<Modal.Body>
<p>您的成绩如下:</p>
<p>买入【{this.state.selectedStockName}】100股,买入金额为:{this.state.buyAmount}元</p>
<p>持有{this.state.holdDay}天后卖出,卖出金额为:{this.state.saleAmount}元</p>
<p>结果为:{this.state.diff}元 / {this.state.diffPercent}%</p>
</Modal.Body>
<Modal.Footer>
<Button onClick={this.close}>关闭</Button>
</Modal.Footer>
</Modal>
</Grid>
);
}
});
const FilterStockBox = React.createClass({
handleFilterClick: function(data) {
this.setState({data: data});
},
getInitialState: function() {
return {
data: []
};
},
render: function() {
return (
<div className="filterStockBox">
<FilterOptionsBox
moduleId={this.props.moduleId}
getModuleUrl="/api/module/"
filterUrl="/api/stock/filter"
mockBuyUrl="/api/stock/mockBuy"
saveModuleUrl="/api/module/save"
onFilterClick={this.handleFilterClick} />
<Row>
<Col lg={12}>
<StockTable data={this.state.data} />
</Col>
</Row>
</div>
);
}
});
const CreateModule = React.createClass({
render: function() {
return (
<FilterStockBox moduleId={this.props.params.moduleId} />
);
}
});
module.exports = CreateModule;<|fim▁end|>
| |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"github.com/dr4ke616/GoProxy/server"
"log"
"os"
)
func main() {
server.DEVIL = false
<|fim▁hole|> var err error
proxy := server.Proxy{}
// Load the proxy from a config file
c := "/etc/goproxy/config.json"
if server.DEVIL {
c = "config.json"
}
err = server.LoadFromConfig(&proxy, c)
if err != nil {
log.Fatal(err)
os.Exit(1)
}
// Start the Proxy server
err = server.StartProxy(&proxy)
if err != nil {
log.Fatal(err)
os.Exit(1)
}
}<|fim▁end|>
| |
<|file_name|>nzbget.py<|end_file_name|><|fim▁begin|>from flask import render_template, jsonify, request
from jsonrpclib import jsonrpc
import base64
import urllib
from maraschino import app, logger
from maraschino.tools import *
def nzbget_http():
if get_setting_value('nzbget_https') == '1':
return 'https://'
else:
return 'http://'
def nzbget_auth():
return 'nzbget:%s@' % (get_setting_value('nzbget_password'))
def nzbget_url():
return '%s%s%s:%s' % (nzbget_http(), \
nzbget_auth(), \
get_setting_value('nzbget_host'), \
get_setting_value('nzbget_port'))
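# With assumed example settings (host "localhost", port "6789", password "secret",
# https disabled), nzbget_url() returns "http://nzbget:secret@localhost:6789".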
def nzbget_exception(e):
logger.log('NZBGet :: EXCEPTION -- %s' % e, 'DEBUG')
@app.route('/xhr/nzbget/')
@requires_auth
def xhr_nzbget():
downloads = status = nzbget = None
logger.log('NZBGet :: Getting download list', 'INFO')
try:
nzbget = jsonrpc.ServerProxy('%s/jsonrpc' % nzbget_url())
status = nzbget.status()
downloads = nzbget.listgroups()
except Exception as e:
nzbget_exception(e)
logger.log('NZBGet :: Getting download list (DONE)', 'INFO')
return render_template('nzbget/queue.html',
nzbget=status,
downloads=downloads,
)
@app.route('/xhr/nzbget/queue/<action>/')
@requires_auth
def queue_action_nzbget(action):
status = False
logger.log('NZBGet :: Queue action: %s' % action, 'INFO')
try:
nzbget = jsonrpc.ServerProxy('%s/jsonrpc' % nzbget_url())
if 'resume' in action:
status = nzbget.resume()
elif 'pause' in action:
status = nzbget.pause()
except Exception as e:
nzbget_exception(e)
return jsonify({'success': status})
@app.route('/xhr/nzbget/queue/add/', methods=['POST'])
@requires_auth
def queue_add_nzbget():
    status = False
    nzb = request.form['url']
    if len(nzb):
        try:
            nzbget = jsonrpc.ServerProxy('%s/jsonrpc' % nzbget_url())
            nzb = urllib.urlopen(nzb).read()
            status = nzbget.append('test', '', False, base64.standard_b64encode(nzb))
except Exception as e:
nzbget_exception(e)
return jsonify({'success': status})
@app.route('/xhr/nzbget/individual/<int:id>/<action>/')
@requires_auth
def individual_action_nzbget(id, action):
status = False
logger.log('NZBGet :: Item %s action: %s' % (id, action), 'INFO')
if 'resume' in action:
action = 'GroupResume'
elif 'pause' in action:
action = 'GroupPause'
elif 'delete' in action:
action = 'GroupDelete'
try:
nzbget = jsonrpc.ServerProxy('%s/jsonrpc' % nzbget_url())
status = nzbget.editqueue(action, 0, '', id)
except Exception as e:
nzbget_exception(e)
return jsonify({'success': status, 'id': id, 'action': action})
@app.route('/xhr/nzbget/set_speed/<int:speed>/')
@requires_auth
def set_speed_nzbget(speed):
logger.log('NZBGet :: Setting speed limit: %s' % speed, 'INFO')<|fim▁hole|> status = nzbget.rate(speed)
except Exception as e:
nzbget_exception(e)
return jsonify({'success': status})<|fim▁end|>
|
try:
nzbget = jsonrpc.ServerProxy('%s/jsonrpc' % nzbget_url())
|
<|file_name|>Torus_bin.js<|end_file_name|><|fim▁begin|>// Converted from: ../../examples/obj/torus/Torus.obj
// vertices: 576
// faces: 576
// materials: 1
//
<|fim▁hole|>// Generated with OBJ -> Three.js converter
// http://github.com/alteredq/three.js/blob/master/utils/exporters/convert_obj_three.py
var model = {
"version" : 1,
"materials": [ {
"DbgColor" : 15658734,
"DbgIndex" : 0,
"DbgName" : "(null)"
}],
"buffers": "Torus_bin.bin"
};
postMessage( model );
close();<|fim▁end|>
| |
<|file_name|>struct_defs.rs<|end_file_name|><|fim▁begin|>// This test case tests the incremental compilation hash (ICH) implementation
// for struct definitions.
// The general pattern followed here is: Change one thing between rev1 and rev2
// and make sure that the hash has changed, then change nothing between rev2 and
// rev3 and make sure that the hash has not changed.
// We also test the ICH for struct definitions exported in metadata. Same as
// above, we want to make sure that the change between rev1 and rev2 also
// results in a change of the ICH for the struct's metadata, and that it stays
// the same between rev2 and rev3.
// build-pass (FIXME(62277): could be check-pass?)
// revisions: cfail1 cfail2 cfail3 cfail4 cfail5 cfail6
// compile-flags: -Z query-dep-graph
// [cfail1]compile-flags: -Zincremental-ignore-spans
// [cfail2]compile-flags: -Zincremental-ignore-spans
// [cfail3]compile-flags: -Zincremental-ignore-spans
// [cfail4]compile-flags: -Zincremental-relative-spans
// [cfail5]compile-flags: -Zincremental-relative-spans
// [cfail6]compile-flags: -Zincremental-relative-spans
#![allow(warnings)]
#![feature(rustc_attrs)]
#![crate_type="rlib"]
// Layout ----------------------------------------------------------------------
#[cfg(any(cfail1,cfail4))]
pub struct LayoutPacked;
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
#[repr(packed)]
pub struct LayoutPacked;
#[cfg(any(cfail1,cfail4))]
struct LayoutC;
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
#[repr(C)]
struct LayoutC;
// Tuple Struct Change Field Type ----------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct TupleStructFieldType(i32);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
// Note that changing the type of a field does not change the type of the struct or enum, but
// adding/removing fields or changing a fields name or visibility does.
struct TupleStructFieldType(
u32
);
// Tuple Struct Add Field ------------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct TupleStructAddField(i32);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct TupleStructAddField(
i32,
u32
);
// Tuple Struct Field Visibility -----------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct TupleStructFieldVisibility(char);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct TupleStructFieldVisibility(pub char);
<|fim▁hole|>#[cfg(any(cfail1,cfail4))]
struct RecordStructFieldType { x: f32 }
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
// Note that changing the type of a field does not change the type of the struct or enum, but
// adding/removing fields or changing a fields name or visibility does.
struct RecordStructFieldType {
x: u64
}
// Record Struct Field Name ----------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct RecordStructFieldName { x: f32 }
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct RecordStructFieldName { y: f32 }
// Record Struct Add Field -----------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct RecordStructAddField { x: f32 }
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct RecordStructAddField {
x: f32,
y: () }
// Record Struct Field Visibility ----------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct RecordStructFieldVisibility { x: f32 }
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct RecordStructFieldVisibility {
pub x: f32
}
// Add Lifetime Parameter ------------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct AddLifetimeParameter<'a>(&'a f32, &'a f64);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of,generics_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of,generics_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddLifetimeParameter<'a, 'b>(&'a f32, &'b f64);
// Add Lifetime Parameter Bound ------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct AddLifetimeParameterBound<'a, 'b>(&'a f32, &'b f64);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddLifetimeParameterBound<'a, 'b: 'a>(
&'a f32,
&'b f64
);
#[cfg(any(cfail1,cfail4))]
struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddLifetimeParameterBoundWhereClause<'a, 'b>(
&'a f32,
&'b f64)
where 'b: 'a;
// Add Type Parameter ----------------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct AddTypeParameter<T1>(T1, T1);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of,generics_of,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,type_of,generics_of,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddTypeParameter<T1, T2>(
// The field contains the parent's Generics, so it's dirty even though its
// type hasn't changed.
T1,
T2
);
// Add Type Parameter Bound ----------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct AddTypeParameterBound<T>(T);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddTypeParameterBound<T: Send>(
T
);
#[cfg(any(cfail1,cfail4))]
struct AddTypeParameterBoundWhereClause<T>(T);
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct AddTypeParameterBoundWhereClause<T>(
T
) where T: Sync;
// Empty struct ----------------------------------------------------------------
// Since we cannot change anything in this case, we just make sure that the
// fingerprint is stable (i.e., that there are no random influences like memory
// addresses taken into account by the hashing algorithm).
// Note: there is no #[cfg(...)], so this is ALWAYS compiled
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
pub struct EmptyStruct;
// Visibility ------------------------------------------------------------------
#[cfg(any(cfail1,cfail4))]
struct Visibility;
#[cfg(not(any(cfail1,cfail4)))]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
pub struct Visibility;
struct ReferencedType1;
struct ReferencedType2;
// Tuple Struct Change Field Type Indirectly -----------------------------------
mod tuple_struct_change_field_type_indirectly {
#[cfg(any(cfail1,cfail4))]
use super::ReferencedType1 as FieldType;
#[cfg(not(any(cfail1,cfail4)))]
use super::ReferencedType2 as FieldType;
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct TupleStruct(
FieldType
);
}
// Record Struct Change Field Type Indirectly -----------------------------------
mod record_struct_change_field_type_indirectly {
#[cfg(any(cfail1,cfail4))]
use super::ReferencedType1 as FieldType;
#[cfg(not(any(cfail1,cfail4)))]
use super::ReferencedType2 as FieldType;
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct RecordStruct {
_x: FieldType
}
}
trait ReferencedTrait1 {}
trait ReferencedTrait2 {}
// Change Trait Bound Indirectly -----------------------------------------------
mod change_trait_bound_indirectly {
#[cfg(any(cfail1,cfail4))]
use super::ReferencedTrait1 as Trait;
#[cfg(not(any(cfail1,cfail4)))]
use super::ReferencedTrait2 as Trait;
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct Struct<T: Trait>(T);
}
// Change Trait Bound Indirectly In Where Clause -------------------------------
mod change_trait_bound_indirectly_in_where_clause {
#[cfg(any(cfail1,cfail4))]
use super::ReferencedTrait1 as Trait;
#[cfg(not(any(cfail1,cfail4)))]
use super::ReferencedTrait2 as Trait;
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
#[rustc_clean(except="hir_owner,hir_owner_nodes,predicates_of", cfg="cfail5")]
#[rustc_clean(cfg="cfail6")]
struct Struct<T>(T) where T : Trait;
}<|fim▁end|>
|
// Record Struct Field Type ----------------------------------------------------
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># encoding: utf-8
"""
__init__.py<|fim▁hole|><|fim▁end|>
|
Created by Thomas Mangin on 2010-01-15.
Copyright (c) 2009-2015 Exa Networks. All rights reserved.
"""
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* io::mod.rs */
use core::mem::volatile_store;
use kernel::sgash;
mod font;
/* http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0225d/BBABEGGE.html */
pub static UART0: *mut u32 = 0x101f1000 as *mut u32;
pub static UART0_IMSC: *mut u32 = (0x101f1000 + 0x038) as *mut u32;
#[allow(dead_code)]
pub static VIC_INTENABLE: *mut u32 = (0x10140000 + 0x010) as *mut u32;
pub static mut CURSOR_X: u32 = 0;
pub static mut CURSOR_Y: u32 = 0;
pub static CURSOR_HEIGHT: u32 = 16;
pub static CURSOR_WIDTH: u32 = 8;
pub static mut CURSOR_COLOR: u32 = 0x000000FF;
pub static mut FG_COLOR: u32 = 0x00FFFFFF;
pub static mut BG_COLOR: u32 = 0xF0000000;
pub static mut CURSOR_BUFFER: [u32, ..8*16] = [0x00FF0000, ..8*16];
pub static mut SAVE_X: u32 = 0;
pub static mut SAVE_Y: u32 = 0;
pub static START_ADDR: u32 = 1024*1024;
pub static mut SCREEN_WIDTH: u32 = 0;
pub static mut SCREEN_HEIGHT: u32 = 0;
pub unsafe fn init(width: u32, height: u32)
{
SCREEN_WIDTH = width;
SCREEN_HEIGHT= height;
sgash::init();
/* For the following magic values, see
* http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0225d/CACHEDGD.html
*/
// 800x600
// See http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0225d/CACCCFBF.html
if (SCREEN_WIDTH == 800 && SCREEN_HEIGHT == 600) {
ws(0x10000010, 0x2CAC);
ws(0x10120000, 0x1313A4C4);
ws(0x10120004, 0x0505F657);
ws(0x10120008, 0x071F1800);
/* See http://forum.osdev.org/viewtopic.php?p=195000 */
ws(0x10120010, START_ADDR);
/* See http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0161e/I911024.html */
ws(0x10120018, 0x82B);
}
// 640x480
// See http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.dui0225d/CACCCFBF.html
else if (SCREEN_WIDTH == 640 && SCREEN_HEIGHT == 480) {
ws(0x10000010, 0x2C77);
ws(0x10120000, 0x3F1F3F9C);
ws(0x10120004, 0x090B61DF);
ws(0x10120008, 0x067F1800);
/* See http://forum.osdev.org/viewtopic.php?p=195000 */
ws(0x10120010, START_ADDR);
/* See http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0161e/I911024.html */
ws(0x10120018, 0x82B);
}
set_bg(0x000000);
set_fg(0xFAFCFF);
set_cursor_color(0xFAFCFF);
fill_bg();
sgash::drawstr(&"sgash> ");<|fim▁hole|> volatile_store(address, c as u32);
}
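/* Scroll the framebuffer up by one text row: every pixel is copied CURSOR_HEIGHT
 * lines up, the freed bottom row is cleared to BG_COLOR, and the cursor is reset
 * to column 0 one row higher. */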
pub unsafe fn scrollup()
{
let mut i = CURSOR_HEIGHT*SCREEN_WIDTH;
while i < (SCREEN_WIDTH*SCREEN_HEIGHT) {
*((START_ADDR + ((i-16*SCREEN_WIDTH)*4)) as *mut u32) = *((START_ADDR+(i*4)) as *u32);
i += 1;
}
i = 4*(SCREEN_WIDTH*SCREEN_HEIGHT - CURSOR_HEIGHT*SCREEN_WIDTH);
while i < 4*SCREEN_WIDTH*SCREEN_HEIGHT {
*((START_ADDR + (i as u32)) as *mut u32) = BG_COLOR;
i += 4;
}
CURSOR_X = 0x0u32;
CURSOR_Y -= CURSOR_HEIGHT;
}
pub unsafe fn draw_char(c: char)
{
if CURSOR_X+(SCREEN_WIDTH*CURSOR_Y) >= SCREEN_WIDTH*SCREEN_HEIGHT
{
scrollup();
}
let font_offset = (c as u8) - 0x20;
let map = font::bitmaps[font_offset];
let mut i = -1;
let mut j = 0;
let mut addr = START_ADDR + 4*(CURSOR_X + /*CURSOR_WIDTH +*/ 1 + SCREEN_WIDTH*CURSOR_Y + CURSOR_HEIGHT*SCREEN_WIDTH);
while j < CURSOR_HEIGHT {
while i < CURSOR_WIDTH {
//let addr = START_ADDR + 4*(CURSOR_X + CURSOR_WIDTH - i + SCREEN_WIDTH*(CURSOR_Y + j));
//let addr = START_ADDR + 4*(CURSOR_X + CURSOR_WIDTH + SCREEN_WIDTH*CURSOR_Y) - 4*i + 4*SCREEN_WIDTH*j
if ((map[j] >> 4*i) & 1) == 1 {
*(addr as *mut u32) = FG_COLOR;
}
else {
*(addr as *mut u32) = BG_COLOR;
}
addr+= 4;
i += 1;
}
addr -= 4*(SCREEN_WIDTH+i);
i = 0;
j += 1;
}
}
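/* Save the pixels currently underneath the cursor into CURSOR_BUFFER and remember
 * the cursor position so restore() can redraw them later. */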
pub unsafe fn backup()
{
let mut i = 0;
let mut j = 0;
while j < CURSOR_HEIGHT
{
while i < CURSOR_WIDTH
{
let addr = START_ADDR + 4*(CURSOR_X + i + SCREEN_WIDTH*(CURSOR_Y + j));
CURSOR_BUFFER[i + j*8] = *(addr as *mut u32);
i += 1;
}
i = 0;
j += 1;
}
SAVE_X = CURSOR_X;
SAVE_Y = CURSOR_Y;
}
pub unsafe fn restore()
{
let mut i = 0;
let mut j = 0;
while j < CURSOR_HEIGHT
{
while i < CURSOR_WIDTH
{
let addr = START_ADDR + 4*(SAVE_X + i + SCREEN_WIDTH*(SAVE_Y + j));
*(addr as *mut u32) = CURSOR_BUFFER[i + j*8];
i += 1;
}
i = 0;
j += 1;
}
}
pub unsafe fn draw_cursor()
{
let mut i = 0;
let mut j = 0;
while j < CURSOR_HEIGHT
{
while i < CURSOR_WIDTH
{
let addr = START_ADDR + 4*(CURSOR_X + i + SCREEN_WIDTH*(CURSOR_Y + j));
*(addr as *mut u32) = CURSOR_COLOR;
i += 1;
}
i = 0;
j += 1;
}
}
pub unsafe fn paint(color: u32)
{
let mut i = 0;
while i < SCREEN_WIDTH*SCREEN_HEIGHT
{
*((START_ADDR as u32 + i*4) as *mut u32) = color;
i+=1;
}
}
pub unsafe fn fill_bg()
{
paint(BG_COLOR);
}
#[allow(dead_code)]
pub unsafe fn read(addr: u32) -> u32
{
*(addr as *mut u32)
}
pub unsafe fn ws(addr: u32, value: u32)
{
*(addr as *mut u32) = *(addr as *mut u32) | value;
}
#[allow(dead_code)]
pub unsafe fn wh(addr: u32, value: u32)
{
*(addr as *mut u32) = value;
}
pub unsafe fn set_fg(color: u32)
{
FG_COLOR = color;
}
pub unsafe fn set_bg(color: u32)
{
BG_COLOR = color;
}
pub unsafe fn set_cursor_color(color: u32)
{
CURSOR_COLOR = color;
}<|fim▁end|>
|
draw_cursor();
}
pub unsafe fn write_char(c: char, address: *mut u32) {
|
<|file_name|>webgl.js<|end_file_name|><|fim▁begin|>"use strict";
/***
* critical element webgl demo by Silke Rohn and Benedikt Klotz
 * Based on the original base application.
* Added and changed functionalities:
* @Benedikt: Objects, Lighting, particle systems, shader, blending and face culling
* @Silke: particle system, changes to particle systems (Elements)
*/
var webgl = {
gl: null,
objects: [],
time: 0.0,
life: 250,
objectAngle: 0,
debug: true,
maxAge: 5.0,
/**
* @author: Silke Rohn
**/
elements: {
HYDRO: -15,
KALIUM: -15,
TITAN: 35,
FERRUM: 10,
URAN: -15,
CARBON: 10,
MAGNESIUM: -20,
OXID: -5,
select: function() {
var select = document.getElementById("select");
var value = select.selectedIndex;
var objects = webgl.objects[2]
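            // Each element below adjusts webgl.life by its constant and then mutates the
            // particle system (colors, velocities or maxAge), re-uploading the modified
            // array into its GL buffer via bufferSubData.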
switch(value) {
case 0:
webgl.life += this.FERRUM;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
if (objects.colors[0] <= 0.9) {
for (var i = 0; i < objects.colors.length;i+=4) {
objects.colors[i] += 0.01;
objects.colors[i+1] += 0.01;
}
changed = true;
}
if(changed) {
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER,objects.colorObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.colors));
}
break;
case 1:
webgl.life += this.OXID;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
if (objects.colors[0] <= 0.9) {
for (var i = 0; i < objects.colors.length;i+=4) {
objects.colors[i+1] += 0.1;
}
changed = true;
}
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.colorObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.colors));
break;
case 2:
webgl.life += this.HYDRO;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
for (var i = 0; i < objects.velocities.length;i+=3) {
objects.velocities[i] += 0.01;
objects.velocities[i+1] += 0.01;
}
changed = true;
webgl.maxAge += 0.5;
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.velocityObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.velocities));
break;
case 3:
webgl.life += this.URAN;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
                    for (var i = 0; i < objects.velocities.length; i += 3) {
objects.velocities[i] -= 0.01;
objects.velocities[i+2] -= 0.01;
}
changed = true;
if (webgl.maxAge <0.5){
window.alert("Die kritische Masse ist verschwunden!!!");
}
webgl.maxAge -= 0.5;
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.velocityObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.velocities));
break;
case 4:
webgl.life += this.CARBON;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
for (var i = 0; i < objects.velocities.length;i+=3) {
objects.velocities[i] -= 0.01;
objects.velocities[i+1] -= 0.01;
}
changed = true;
if (webgl.maxAge <0.5){
window.alert("Die kritische Masse ist verschwunden!!!");
}
webgl.maxAge -= 0.5;
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.velocityObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.velocities));
break;
case 5:
webgl.life += this.TITAN;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
if (objects.colors[2] >= 0.1) {
for (var i = 0; i < objects.colors.length;i+=4) {
objects.colors[i+2] -= 0.1;
}
changed = true;
}
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.colorObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.colors));
break;
case 6:
webgl.life += this.MAGNESIUM;
if (webgl.life <0){<|fim▁hole|>
var changed = false;
if ((objects.colors[0] >= 0.1) && (objects.colors[1] >= 0.1)) {
for (var i = 0; i < objects.colors.length;i+=4) {
objects.colors[i] -= 0.1;
objects.colors[i+1] -= 0.1;
}
changed = true;
}
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.colorObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.colors));
break;
case 7:
webgl.life += this.KALIUM;
if (webgl.life <0){
window.alert("Die kritische Masse ist explodiert!!!");
}
var changed = false;
for (var i = 0; i < webgl.objects[2].velocities.length;i+=3) {
objects.velocities[i] += 0.01;//Math.random()*.1;
objects.velocities[i+1] += 0.01;//Math.random()*.1;
objects.velocities[i+2] += 0.01;//Math.random()*.1;
}
changed = true;
if (webgl.maxAge <0.5){
window.alert("Die kritische Masse ist verschwunden!!!");
}
webgl.maxAge -= 0.5;
webgl.gl.bindBuffer(webgl.gl.ARRAY_BUFFER, objects.velocityObject);
webgl.gl.bufferSubData(webgl.gl.ARRAY_BUFFER,0,new Float32Array(objects.velocities));
break;
default:
console.log("Error: unknown element");
}
},
},
/**
* Encapsulates Projection and Viewing matrix and some helper functions.
**/
matrices: {
projection: new J3DIMatrix4(),
viewing: new J3DIMatrix4(),
viewingTranslate: {
x: 0,
y: 0,
z: 0
},
viewingRotations: {
x: 0,
y: 0,
z: 0
},
/**
* Initializes the Projection and the Viewing matrix.
         * Projection uses perspective projection with a fov of 30.0, aspect of 1.0, near 1 and far 10000.
         * Viewing is set up as a translate of (0, 0, -5) and a rotation of 50 degrees around the x axis.
**/
init: function () {
this.projection.perspective(30, 1.0, 1, 10000);
this.viewingTranslate = {
x: 0,
y: 0,
z: -5
};
this.viewingRotations = {
x: 50,
y: 0,
z: 0
};
this.updateViewing.call(this);
},
updateViewing: function() {
var t = this.viewingTranslate;
this.viewing = new J3DIMatrix4();
this.viewing.translate(t.x, t.y, t.z);
var r = this.viewingRotations;
this.viewing.scale(1.0,1.0,1.0) // 2.0,1.5,10.0
this.viewing.rotate(r.x, 1, 0, 0);
this.viewing.rotate(r.y, 0, 1, 0);
this.viewing.rotate(r.z, 0, 0, 1);
},
zoomIn: function() {
this.viewingTranslate.z -= 1;
this.updateViewing();
},
zoomOut: function() {
this.viewingTranslate.z += 1;
this.updateViewing();
},
moveLeft: function() {
this.viewingTranslate.x += 1;
this.updateViewing();
},
moveRight: function() {
this.viewingTranslate.x -= 1;
this.updateViewing();
},
moveUp: function() {
this.viewingTranslate.y += 1;
this.updateViewing();
},
moveDown: function() {
this.viewingTranslate.y -= 1;
this.updateViewing();
},
rotateXAxis: function(offset) {
this.viewingRotations.x = (this.viewingRotations.x + offset) % 360;
this.updateViewing();
},
rotateYAxis: function(offset) {
this.viewingRotations.y = (this.viewingRotations.y + offset) % 360;
this.updateViewing();
},
rotateZAxis: function(offset) {
this.viewingRotations.z = (this.viewingRotations.z + offset) % 360;
this.updateViewing();
},
reset: function() {
this.projection = new J3DIMatrix4();
this.viewing = new J3DIMatrix4();
this.init();
},
rotateObjectsLeft: function() {
webgl.objectAngle = (webgl.objectAngle - 1) % 360;
},
rotateObjectsRight: function() {
webgl.objectAngle = (webgl.objectAngle + 1) % 360;
},
},
/**
* This message checks whether one of the error flags is set and
* logs it to the console. If @p message is provided the message
* is printed together with the error code. This allows to track
* down an error by adding useful debug information to it.
*
* @param message Optional message printed together with the error.
**/
checkError: function (message) {
var errorToString = function(error) {
switch (error) {
case gl.NO_ERROR:
return "NO_ERROR";
case gl.INVALID_ENUM:
return "INVALID_ENUM";
case gl.INVALID_VALUE:
return "INVALID_VALUE";
case gl.INVALID_OPERATION:
return "INVALID_OPERATION";
case gl.OUT_OF_MEMORY:
return "OUT_OF_MEMORY";
}
return "UNKNOWN ERROR: " + error;
};
var gl = webgl.gl;
var error = gl.getError();
while (error !== gl.NO_ERROR) {
if (message) {
console.log(message + ": " + errorToString(error));
} else {
console.log(errorToString(error));
}
error = gl.getError();
}
},
/**
* This method logs information about the system:
* @li VERSION
* @li RENDERER
* @li VENDOR
* @li UNMASKED_RENDERER_WEBGL (Extension WEBGL_debug_renderer_info)
* @li UNMASKED_VENDOR_WEBGL (Extension WEBGL_debug_renderer_info)
* @li supportedExtensions
**/
systemInfo: function () {
var gl = webgl.gl;
console.log("Version: " + gl.getParameter(gl.VERSION));
console.log("Renderer: " + gl.getParameter(gl.RENDERER));
console.log("Vendor: " + gl.getParameter(gl.VENDOR));
var extensions = gl.getSupportedExtensions();
for (var i = 0; i < extensions.length; i++) {
if (extensions[i] == "WEBGL_debug_renderer_info") {
var renderInfo = gl.getExtension("WEBGL_debug_renderer_info");
if (renderInfo) {
console.log("Unmasked Renderer: " + gl.getParameter(renderInfo.UNMASKED_RENDERER_WEBGL));
console.log("Unmasked Vendor: " + gl.getParameter(renderInfo.UNMASKED_VENDOR_WEBGL));
}
}
}
console.log("Extensions: ");
console.log(extensions);
},
/**
* Creates a shader program out of @p vertex and @p fragment shaders.
*
* In case the linking of the shader program fails the programInfoLog is
* logged to the console.
*
* @param vertex The compiled and valid WebGL Vertex Shader
* @param fragment The compiled and valid WebGL Fragment Shader
* @returns The linked WebGL Shader Program
**/
createProgram: function (vertex, fragment) {
var gl = webgl.gl;
var shader = gl.createProgram();
gl.attachShader(shader, vertex);
gl.attachShader(shader, fragment);
gl.linkProgram(shader);
gl.validateProgram(shader);
var log = gl.getProgramInfoLog(shader);
if (log != "") {
console.log(log);
}
webgl.checkError("create Program");
return shader;
},
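    // e.g. var prog = webgl.createProgram(vertexShader, fragmentShader); gl.useProgram(prog);
    // as done in createObjectShader() and createParticleShader() below.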
/**
* Generic method to render any @p object with any @p shader as TRIANGLES.
*
* This method can enable vertex, normal and texCoords depending on whether they
* are defined in the @p object and @p shader. Everything is added in a completely
* optional way, so there is no chance that an incorrect VertexAttribArray gets
* enabled.
*
* Changes by: Benedikt Klotz
*
* The @p object can provide the following elements:
* @li loaded: boolean indicating whether the object is completely loaded
* @li blending: boolean indicating whether blending needs to be enabled
* @li texture: texture object to bind if valid
* @li vertexObject: ARRAY_BUFFER with three FLOAT values (x, y, z)
* @li normalObject: ARRAY_BUFFER with three FLOAT values (x, y, z)
* @li texCoordObject: ARRAY_BUFFER with two FLOAT values (s, t)
* @li indexObject: ELEMENT_ARRAY_BUFFER
* @li numIndices: Number of indices in indexObject
* @li indexSize: The type of the index, must be one of GL_UNSIGNED_BYTE, GL_UNSIGNED_SHORT or GL_UNSIGNED_INT
*
* The @p shader can provide the following elements:
* @li vertexLocation: attribute location for the vertexObject
* @li normalLocation: attribute location for the normalObject
* @li texCoordsLocation: attribute location for the texCoordsLocation
*
* It is expected that the shader program encapsulated in @p shader is already in use.
**/
drawObject: function (gl, object, shader) {
if (object.loaded === false) {
// not yet loaded, don't render
return;
}
// Set Time
if(object.particle == true) {
gl.uniform1f(shader.timeLocation, this.time);
gl.uniform1f(shader.ageLocation, this.maxAge);
}
if (object.texture !== undefined) {
gl.bindTexture(gl.TEXTURE_2D, object.texture);
}
if (shader.vertexLocation !== undefined && object.vertexObject !== undefined) {
gl.enableVertexAttribArray(shader.vertexLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.vertexObject);
gl.vertexAttribPointer(shader.vertexLocation, 3, gl.FLOAT, false, 0, 0);
}
// start: Particle System related Attributes
if (shader.colorLocation !== undefined && object.colorObject !== undefined) {
gl.enableVertexAttribArray(shader.colorLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.colorObject);
gl.vertexAttribPointer(shader.colorLocation, 4, gl.FLOAT, false, 0, 0);
}
if (shader.velocityLocation !== undefined && object.velocityObject !== undefined) {
gl.enableVertexAttribArray(shader.velocityLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.velocityObject);
gl.vertexAttribPointer(shader.velocityLocation, 3, gl.FLOAT, false, 0, 0);
}
if (shader.startTimeLocation !== undefined && object.startTimeObject !== undefined) {
gl.enableVertexAttribArray(shader.startTimeLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.startTimeObject);
gl.vertexAttribPointer(shader.startTimeLocation, 1, gl.FLOAT, false, 0, 0);
}
if(object.particle == true) {
gl.drawArrays(gl.POINTS, 0, object.particleObject.length);
}
// End: Particle System related Attributes
if (shader.normalLocation !== undefined && object.normalObject !== undefined) {
gl.enableVertexAttribArray(shader.normalLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.normalObject);
gl.vertexAttribPointer(shader.normalLocation, 3, gl.FLOAT, false, 0, 0);
}
if (shader.texCoordsLocation !== undefined && object.texCoordObject !== undefined) {
gl.enableVertexAttribArray(shader.texCoordsLocation);
gl.bindBuffer(gl.ARRAY_BUFFER, object.texCoordObject);
gl.vertexAttribPointer(shader.texCoordsLocation, 2, gl.FLOAT, false, 0, 0);
}
// Activate blending
if (object.blending !== undefined && object.blending === true) {
gl.blendFunc(gl.SRC_ALPHA, gl.ONE); //
gl.enable(gl.BLEND);
gl.uniform1f(shader.alphaLocation, 0.8);
gl.disable(gl.DEPTH_TEST);
// Set Alpha if blending for object is not activated
} else{
gl.uniform1f(shader.alphaLocation, 1.0);
}
// Activate culling
if(object.culling !== undefined && object.culling === true) {
gl.enable(gl.CULL_FACE);
gl.cullFace(gl.FRONT);
}
if (object.indexObject !== undefined && object.numIndices !== undefined && object.indexSize !== undefined) {
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, object.indexObject);
gl.drawElements(gl.TRIANGLES, object.numIndices, object.indexSize, 0);
}
gl.bindTexture(gl.TEXTURE_2D, null);
        // Disable blending again and re-enable the depth test
if (object.blending !== undefined && object.blending === true) {
gl.enable(gl.DEPTH_TEST);
gl.disable(gl.BLEND);
}
// Disable Culling
if(object.culling !== undefined && object.culling === true) {
gl.disable(gl.CULL_FACE);
}
},
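    /* Minimal sketch of an object accepted by drawObject (the names are placeholders):
     *   { loaded: true, vertexObject: vbo, indexObject: ibo,
     *     numIndices: 6, indexSize: gl.UNSIGNED_BYTE }
     * together with a shader exposing at least vertexLocation; texture, normals,
     * texCoords, blending and culling are all optional. */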
repaintLoop: {
frameRendering: false,
setup: function() {
var render = function(){
if (webgl.repaintLoop.frameRendering) {
return;
}
webgl.repaintLoop.frameRendering = true;
webgl.angle = (webgl.angle + 1) % 360;
for (var i = 0; i < webgl.objects.length; i++) {
var object = webgl.objects[i];
if (object.update === undefined) {
continue;
}
object.update.call(object);
}
webgl.time += 16/1000;
webgl.displayFunc.call(webgl);
webgl.repaintLoop.frameRendering = false;
window.requestAnimFrame(render);
};
window.requestAnimFrame(render);
render();
},
},
createShader: function (gl, type, source) {
var shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
var log = gl.getShaderInfoLog(shader);
if (log != "") {
console.log(log);
}
webgl.checkError("create shader " + source);
return shader;
},
/**
* Create a texture shader with Lighting enabled
*
* @author: Benedikt Klotz
**/
createObjectShader: function() {
var gl = this.gl;
var shader = {
program: -1,
loaded: false,
mvpLocation: -1,
textureLocation: -1,
vertexLocation: -1,
texCoordsLocation: -1,
lightDirLocation: -1,
create: function() {
if (this.vertexShader === undefined || this.fragmentShader === undefined) {
return;
}
var program = webgl.createProgram(this.vertexShader, this.fragmentShader);
this.program = program;
this.use();
// resolve locations
this.normalMatrixLocation = gl.getUniformLocation(program, "u_normalMatrix"),
this.lightDirLocation = gl.getUniformLocation(program, "u_lightDir"),
this.mvpLocation = gl.getUniformLocation(program, "modelViewProjection"),
this.textureLocation = gl.getUniformLocation(program, "u_texture"),
this.vertexLocation = gl.getAttribLocation(program, "vertex"),
this.texCoordsLocation = gl.getAttribLocation(program, "texCoords"),
this.alphaLocation = gl.getUniformLocation(program, "uAlpha");
// set uniform
gl.uniform1i(this.textureLocation, 0);
gl.uniform3f(this.lightDirLocation, 1.0, 1.0, 1.0);
this.loaded = true;
},
use: function () {
gl.useProgram(this.program);
}
};
$.get("shaders/texture/vertex.glsl", function(data, response) {
shader.vertexShader = webgl.createShader(webgl.gl, webgl.gl.VERTEX_SHADER, data);
shader.create.call(shader);
}, "html");
$.get("shaders/texture/fragment.glsl", function(data, response) {
shader.fragmentShader = webgl.createShader(webgl.gl, webgl.gl.FRAGMENT_SHADER, data);
shader.create.call(shader);
}, "html");
return shader;
},
/**
* Create a special shader for the Particle System
*
* @author: Benedikt Klotz
**/
createParticleShader: function () {
var gl = this.gl;
var shader = {
program: -1,
loaded: false,
mvpLocation: -1,
vertexLocation: -1,
dirLocation: -1,
create: function() {
if (this.vertexShader === undefined || this.fragmentShader === undefined) {
return;
}
var program = webgl.createProgram(this.vertexShader, this.fragmentShader);
this.program = program;
this.use();
var shader = {};
// resolve locations
this.mvpLocation = gl.getUniformLocation(program, "modelViewProjection"),
this.timeLocation = gl.getUniformLocation(program, "u_time"),
this.vertexLocation = gl.getAttribLocation(program, "vertex"),
this.ageLocation = gl.getUniformLocation(program, "maxAlter");
this.colorLocation = gl.getAttribLocation(program, "initialColor"),
this.velocityLocation = gl.getAttribLocation(program, "velocity"),
this.startTimeLocation = gl.getAttribLocation(program, "startTime"),
this.sizeLocation = gl.getAttribLocation(program, "size"),
this.loaded = true;
},
use: function () {
gl.useProgram(this.program);
}
};
$.get("shaders/particle/vertex.glsl", function(data) {
shader.vertexShader = webgl.createShader(webgl.gl, webgl.gl.VERTEX_SHADER, data);
shader.create.call(shader);
}, "html");
$.get("shaders/particle/fragment.glsl", function(data) {
shader.fragmentShader = webgl.createShader(webgl.gl, webgl.gl.FRAGMENT_SHADER, data);
shader.create.call(shader);
}, "html");
return shader;
},
setupKeyHandler: function() {
var m = this.matrices;
$("body").keydown(function (event) {
switch (event.keyCode) {
case 107:
m.zoomOut.call(m);
break;
case 109:
m.zoomIn.call(m);
break;
case 39:
if (event.shiftKey) {
m.rotateZAxis.call(m, 1);
} else {
m.moveLeft.call(m);
/** disabled
* m.rotateObjectsLeft.call(m);
**/
}
break;
case 37:
if (event.shiftKey) {
m.rotateZAxis.call(m, -1);
} else {
m.moveRight.call(m);
/** disabled
* m.rotateObjectsRight.call(m);
**/
}
break;
case 38:
if (event.shiftKey) {
m.rotateXAxis.call(m, 1);
} else {
m.moveUp.call(m);
}
break;
case 40:
if (event.shiftKey) {
m.rotateXAxis.call(m, -1);
} else {
m.moveDown.call(m);
}
break;
case 82:
m.reset.call(m);
break;
}
});
},
/**
* Create ground object
*
* @author: Benedikt Klotz
*/
makeGround: function (gl){
var buffer = { };
// vertices array
var vertices =
[ -1,-1,-1, 1,-1,-1, 1,-1, 1, -1,-1, 1 ];
// normal array
var normals =
[ 0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1, 0 ];
// texCoord array
var texCoords =
[ 0, 0, 1, 0, 1, 1, 0, 1 ];
// index array
var indices =
[ 0, 1, 2, 0, 2, 3 ];
buffer.vertexObject = this.createBuffer_f32(gl, vertices);
buffer.texCoordObject = this.createBuffer_f32(gl, texCoords);
buffer.normalObject = this.createBuffer_f32(gl,normals);
buffer.indexObject = this.createBuffer_ui8(gl, indices);
buffer.numIndices = indices.length;
return buffer;
},
/**
* create an upwards open box
*
* @author: Benedikt Klotz
**/
makeOpenBox: function (gl){
var buffer = { };
// vertices array
var vertices =
[ 1, 1, 1, -1, 1, 1, -1,-1, 1, 1,-1, 1, // v0-v1-v2-v3 front
1, 1, 1, 1,-1, 1, 1,-1,-1, 1, 1,-1, // v0-v3-v4-v5 right
1, 1, 1, 1, 1,-1, -1, 1,-1, -1, 1, 1, // v0-v5-v6-v1 top
-1, 1, 1, -1, 1,-1, -1,-1,-1, -1,-1, 1, // v1-v6-v7-v2 left
-1,-1,-1, 1,-1,-1, 1,-1, 1, -1,-1, 1, // v7-v4-v3-v2 bottom
1,-1,-1, -1,-1,-1, -1, 1,-1, 1, 1,-1 ]; // v4-v7-v6-v5 back
// normal array
var normals =
[ 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, // v0-v1-v2-v3 front
1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, 0, // v0-v3-v4-v5 right
0, 1, 0, 0, 1, 0, 0, 1, 0, 0, 1, 0, // v0-v5-v6-v1 top
-1, 0, 0, -1, 0, 0, -1, 0, 0, -1, 0, 0, // v1-v6-v7-v2 left
0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1, 0, // v7-v4-v3-v2 bottom
0, 0,-1, 0, 0,-1, 0, 0,-1, 0, 0,-1 ]; // v4-v7-v6-v5 back
// texCoord array
var texCoords =
[ 1, 1, 0, 1, 0, 0, 1, 0, // v0-v1-v2-v3 front
0, 1, 0, 0, 1, 0, 1, 1, // v0-v3-v4-v5 right
1, 0, 1, 1, 0, 1, 0, 0, // v0-v5-v6-v1 top
1, 1, 0, 1, 0, 0, 1, 0, // v1-v6-v7-v2 left
0, 0, 1, 0, 1, 1, 0, 1, // v7-v4-v3-v2 bottom
0, 0, 1, 0, 1, 1, 0, 1 ]; // v4-v7-v6-v5 back
// index array
var indices =
[ 0, 1, 2, 0, 2, 3, // front
4, 5, 6, 4, 6, 7, // right
12,13,14, 12,14,15, // left
16,17,18, 16,18,19, // bottom
20,21,22, 20,22,23 ]; // back
buffer.vertexObject = this.createBuffer_f32(gl, vertices);
buffer.texCoordObject = this.createBuffer_f32(gl, texCoords);
buffer.normalObject = this.createBuffer_f32(gl,normals);
buffer.indexObject = this.createBuffer_ui8(gl, indices);
buffer.numIndices = indices.length;
return buffer;
},
/**
*
     * Create buffer objects for 32-bit float and 8-bit unsigned int data.
     * The _d variant uploads with gl.DYNAMIC_DRAW for buffers that are updated later.
*
* @author: Benedikt Klotz
**/
createBuffer_f32: function (gl, data) {
var vbo = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(data), gl.STATIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
return vbo;
},
createBuffer_f32_d: function (gl, data) {
var vbo = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, vbo);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(data), gl.DYNAMIC_DRAW);
gl.bindBuffer(gl.ARRAY_BUFFER, null);
return vbo;
},
createBuffer_ui8: function (gl, data) {
var vbo = gl.createBuffer();
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, vbo);
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint8Array(data), gl.STATIC_DRAW);
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, null);
return vbo;
},
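        /**
         * Illustrative usage only (not part of the original code; the receiver name
         * `webgl` is assumed from how this object is referenced elsewhere in the file):
         *
         *   var positions = webgl.createBuffer_f32(gl, [0, 0, 0, 1, 0, 0, 1, 1, 0]); // static vertex data
         *   var colors    = webgl.createBuffer_f32_d(gl, [1, 0, 0, 1]);              // updated later via gl.bufferSubData
         *   var indices   = webgl.createBuffer_ui8(gl, [0, 1, 2]);                   // element indices, drawn as gl.UNSIGNED_BYTE
         **/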
/**
         * Create a particle system whose particles start out black and move in all directions
*
* @author: Benedikt Klotz, Silke Rohn
**/
createParticle: function (dir) {
var particle = {};
particle.position = [1, 1, 1];
switch(dir) {
case 0:
particle.velocity = [Math.random()*.1, Math.random()*.1, Math.random()*.1];
break;
case 1:
particle.velocity = [-Math.random()*.1, Math.random()*.1, Math.random()*.1];
break;
case 2:
particle.velocity = [Math.random()*.1, -Math.random()*.1, Math.random()*.1];
break;
case 3:
particle.velocity = [Math.random()*.1, Math.random()*.1, -Math.random()*.1];
break;
case 4:
particle.velocity = [-Math.random()*.1, -Math.random()*.1, Math.random()*.1];
break;
case 5:
particle.velocity = [-Math.random()*.1, Math.random()*.1, -Math.random()*.1];
break;
case 6:
particle.velocity = [Math.random()*.1, -Math.random()*.1, -Math.random()*.1];
break;
case 7:
particle.velocity = [-Math.random()*.1, -Math.random()*.1, -Math.random()*.1];
break;
default:
console.log("Error - particle creation: Unknown Direction - " + dir);
break;
}
// start with black particles
particle.color = [0.0, 0.0, 0.0, 1.0];
            particle.startTime = Math.random() * 10 + 1;
            // remember the direction index so createParticelSystem can fill the dir buffer with it
            particle.dir = dir;
            return particle;
},
createParticelSystem: function(gl) {
var particles = [];
        for (var i = 0, dir = 0; i < 100000; i++, dir++) {
            if (dir === 8) {
                dir = 0;
}
particles.push(this.createParticle(dir));
}
var vertices = [];
var velocities = [];
var colors = [];
var startTimes = [];
var dirs = [];
for (i=0; i<particles.length; i++) {
var particle = particles[i];
vertices.push(particle.position[0]);
vertices.push(particle.position[1]);
vertices.push(particle.position[2]);
velocities.push(particle.velocity[0]);
velocities.push(particle.velocity[1]);
velocities.push(particle.velocity[2]);
colors.push(particle.color[0]);
colors.push(particle.color[1]);
colors.push(particle.color[2]);
colors.push(particle.color[3]);
startTimes.push(particle.startTime);
dirs.push(particle.dir);
}
// create gl Buffer for particles
var buffer = { };
buffer.particleObject = particles;
buffer.vertexObject = this.createBuffer_f32(gl, vertices);
buffer.velocityObject = this.createBuffer_f32_d(gl, velocities);
buffer.colorObject = this.createBuffer_f32_d(gl, colors);
buffer.startTimeObject = this.createBuffer_f32_d(gl, startTimes);
buffer.dirObject = this.createBuffer_f32(gl, dirs);
// save object properties for update later
buffer.velocities = velocities;
buffer.startTimes = startTimes;
buffer.colors = colors;
buffer.particle = true;
return buffer;
},
loadTexture: function(gl, path, object)
{
var texture = gl.createTexture();
var image = new Image();
g_loadingImages.push(image);
        image.onload = function() { webgl.doLoadTexture.call(webgl, gl, image, texture, object); };
image.src = path;
return texture;
},
doLoadTexture: function(gl, image, texture, object)
{
g_loadingImages.splice(g_loadingImages.indexOf(image), 1);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// Set Texture Parameter
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
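        // Note: LINEAR filtering without mipmaps combined with CLAMP_TO_EDGE wrapping also keeps
        // non-power-of-two images valid as textures under WebGL 1.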
gl.bindTexture(gl.TEXTURE_2D, null);
// Set texture loaded
object.loaded = true;
},
/**
* Initialize all systems and objects
*
* @author: Benedikt Klotz
**/
init: function (canvasName, vertexShaderName, fragmentShaderName) {
var canvas, gl;
// Setup Error reporting
$(document).ajaxError(function(e,xhr,opt){
console.log("Error requesting " + opt.url + ": " + xhr.status + " " + xhr.statusText);
});
// setup the API
canvas = document.getElementById(canvasName);
gl = canvas.getContext("experimental-webgl",{premultipliedAlpha:false});
this.gl = gl;
gl.viewport(0, 0, canvas.width, canvas.height);
// make background blue
gl.clearColor(0.0, 0.0, 0.5, 0.6);
gl.enable(gl.DEPTH_TEST);
this.systemInfo();
// create the projection matrix
this.matrices.init.call(this.matrices);
// ground objects
        var object = this.makeGround.call(this, gl);
object.indexSize = gl.UNSIGNED_BYTE;
object.name = "ground";
object.blending = false;
// Enable Front Face Culling
object.culling = true;
object.texture = this.loadTexture.call(this, gl, "textures/metall.jpg", object);
object.shader = this.createObjectShader();
object.model = function() {
var model = new J3DIMatrix4();
            model.scale(1.6, 1.2, 1.8);
            model.rotate(this.objectAngle, 0.0, 1.0, 0.0);
            return model;
};
this.objects[this.objects.length] = object;
// create a open box
object = this.makeOpenBox.call(this, gl);
object.indexSize = gl.UNSIGNED_BYTE;
object.name = "box";
// enable object blending
object.blending = true;
object.texture = this.loadTexture.call(this, gl, "textures/glas.jpg", object);
object.shader = this.createObjectShader();
object.model = function() {
var model = new J3DIMatrix4();
            model.scale(0.5, 0.5, 0.5);
model.translate(0,-1.39,0);
model.rotate(this.objectAngle, 0.0, 1.0, 0.0);
return model;
};
this.objects[this.objects.length] = object;
// particle objects
        object = this.createParticelSystem(gl);
object.shader = this.createParticleShader();
object.loaded = true;
object.blending = true;
object.model = function() {
var model = new J3DIMatrix4();
model.translate(-1.0,-1.7,-1.0);
model.rotate(this.objectAngle, 0.0, 1.0, 0.0);
return model;
};
        // Reset a particle's startTime once it has been discarded (its lifetime has expired)
setInterval(function() {
var particles = object.particleObject;
var changed = false;
for (var i=0; i<particles.length; i++) {
if(object.startTimes[i] + 7.0 <= webgl.time) {
object.startTimes[i] = webgl.time + 3.0*Math.random();
changed = true;
}
}
if(changed) {
gl.bindBuffer(gl.ARRAY_BUFFER,object.startTimeObject);
gl.bufferSubData(gl.ARRAY_BUFFER, 0, new Float32Array(object.startTimes));
}
}, 1000);
this.objects[this.objects.length] = object;
// setup animation
this.repaintLoop.setup.call(this);
if(this.debug) {
// setup handlers
this.setupKeyHandler();
}
},
displayFunc: function () {
var gl = this.gl;
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
for (var i = 0; i < this.objects.length; i++) {
var object, shader, modelView, normalMatrix, modelViewProjection;
object = this.objects[i];
if (object.shader === undefined) {
// no shader is set, cannot render
continue;
}
if (object.shader.loaded !== undefined && object.shader.loaded === false) {
// shader not yet loaded
continue;
}
shader = object.shader;
shader.use();
// create the matrices
modelViewProjection = new J3DIMatrix4(this.matrices.projection);
modelView = new J3DIMatrix4(this.matrices.viewing);
if (object.model !== undefined) {
modelView.multiply(object.model.call(this));
}
modelViewProjection.multiply(modelView);
if (shader.mvpLocation !== undefined) {
modelViewProjection.setUniform(gl, shader.mvpLocation, false);
}
if (shader.normalMatrixLocation) {
normalMatrix = new J3DIMatrix4();
normalMatrix.load(modelView);
normalMatrix.invert();
normalMatrix.transpose();
                normalMatrix.setUniform(gl, shader.normalMatrixLocation, false);
}
this.drawObject(gl, object, shader);
this.checkError("drawObject: " + i);
}
this.checkError("displayFunc");
}
};<|fim▁end|>
|
window.alert("Die kritische Masse ist explodiert!!!");
}
|
<|file_name|>_tickformat.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators<|fim▁hole|> self,
plotly_name="tickformat",
parent_name="scattercarpet.marker.colorbar",
**kwargs
):
super(TickformatValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "colorbars"),
role=kwargs.pop("role", "style"),
**kwargs
)<|fim▁end|>
|
class TickformatValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
|
<|file_name|>macros.rs<|end_file_name|><|fim▁begin|>#[macro_use]
extern crate gfx;
pub use gfx::format as fm;
#[derive(Clone, Debug)]
pub struct Rg16;
gfx_format!(Rg16: R16_G16 = Vec2<Float>);
gfx_defines!{
#[derive(PartialEq)]
vertex Vertex {
_x: i8 = "x",
_y: f32 = "y",
}
vertex Instance {
pos: [f32; 2] = "pos",
color: [f32; 3] = "color",
}
constant Local {
pos: [u32; 4] = "pos",
}
#[derive(PartialEq)] #[derive(PartialOrd)]
constant LocalMeta {
pos: [u32; 4] = "pos_meta",
}
pipeline testpipe {
vertex: gfx::VertexBuffer<Vertex> = (),
instance: gfx::InstanceBuffer<Instance> = (),
const_locals: gfx::ConstantBuffer<Local> = "Locals",
global: gfx::Global<[f32; 4]> = "Global",
tex_diffuse: gfx::ShaderResource<[f32; 4]> = "Diffuse",
sampler_linear: gfx::Sampler = "Linear",
buf_frequency: gfx::UnorderedAccess<[f32; 4]> = "Frequency",
pixel_color: gfx::RenderTarget<fm::Rgba8> = "Color",
blend_target: gfx::BlendTarget<Rg16> =
("o_Color1", gfx::state::MASK_ALL, gfx::preset::blend::ADD),
depth: gfx::DepthTarget<gfx::format::DepthStencil> =
gfx::preset::depth::LESS_EQUAL_TEST,
blend_ref: gfx::BlendRef = (),
scissor: gfx::Scissor = (),
}
}
fn _test_pso<R, F>(factory: &mut F) -> gfx::PipelineState<R, testpipe::Meta> where
R: gfx::Resources,
F: gfx::traits::FactoryExt<R>,
{
factory.create_pipeline_simple(&[], &[], testpipe::new()).unwrap()
}
gfx_pipeline_base!( testraw {
vertex: gfx::RawVertexBuffer,
cbuf: gfx::RawConstantBuffer,
tex: gfx::RawShaderResource,
target: gfx::RawRenderTarget,
});
fn _test_raw<R, F>(factory: &mut F) -> gfx::PipelineState<R, testraw::Meta> where
R: gfx::Resources,
F: gfx::traits::FactoryExt<R>,
{
let special = gfx::pso::buffer::Element {
format: fm::Format(fm::SurfaceType::R32, fm::ChannelType::Float),
offset: 0,
};<|fim▁hole|> tex: "Specular",
target: ("o_Color2",
fm::Format(fm::SurfaceType::R8_G8_B8_A8, fm::ChannelType::Unorm),
gfx::state::MASK_ALL, None),
};
factory.create_pipeline_simple(&[], &[], init).unwrap()
}<|fim▁end|>
|
let init = testraw::Init {
vertex: (&[("a_Special", special)], 12, 0),
cbuf: "Locals",
|
<|file_name|>EditorViewSet.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from rest_framework import viewsets
from rest_framework.response import Response
import snotes20.models as models
class EditorViewSet(viewsets.ViewSet):
def list(self, request):
return Response([
{
'short': short,<|fim▁hole|> ])<|fim▁end|>
|
'long': long,
'url': settings.EDITORS[short]['userurl']
} for short, long in models.EDITOR_CHOICES
|
<|file_name|>slack.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
#
# Copyright (c) 2017 Nick Douma
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the<|fim▁hole|>#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from argparse import ArgumentParser, ArgumentTypeError
import datetime
import json
import re
import urllib.error
import urllib.parse
import urllib.request
import sys
ISO8601 = r"^(\d{4})-?(\d{2})-?(\d{2})?[T ]?(\d{2}):?(\d{2}):?(\d{2})"
def iso8601_to_unix_timestamp(value):
try:
return int(value)
except ValueError:
pass
matches = re.match(ISO8601, value)
if not matches:
raise ArgumentTypeError("Argument is not a valid UNIX or ISO8601 "
"timestamp.")
    return int(datetime.datetime(
        *[int(m) for m in matches.groups()]).timestamp())
def hex_value(value):
value = value.replace("#", "")
if not re.match(r"^[a-f0-9]{6}$", value):
raise ArgumentTypeError("Argument is not a valid hex value.")
return value
parser = ArgumentParser(description="Send notifications using Slack")
parser.add_argument("--webhook-url", help="Webhook URL.", required=True)
parser.add_argument("--channel", help="Channel to post to (prefixed with #), "
"or a specific user (prefixed with @).")
parser.add_argument("--username", help="Username to post as")
parser.add_argument("--title", help="Notification title.")
parser.add_argument("--title_link", help="Notification title link.")
parser.add_argument("--color", help="Sidebar color (as a hex value).",
type=hex_value)
parser.add_argument("--ts", help="Unix timestamp or ISO8601 timestamp "
"(will be converted to Unix timestamp).",
type=iso8601_to_unix_timestamp)
parser.add_argument("message", help="Notification message.")
args = parser.parse_args()
message = {}
for param in ["channel", "username"]:
value = getattr(args, param)
if value:
message[param] = value
attachment = {}
for param in ["title", "title_link", "color", "ts", "message"]:
value = getattr(args, param)
if value:
attachment[param] = value
attachment['fallback'] = attachment['message']
attachment['text'] = attachment['message']
del attachment['message']
message['attachments'] = [attachment]
payload = {"payload": json.dumps(message)}
try:
parameters = urllib.parse.urlencode(payload).encode('UTF-8')
url = urllib.request.Request(args.webhook_url, parameters)
responseData = urllib.request.urlopen(url).read()
except urllib.error.HTTPError as he:
print("Sending message to Slack failed: {}".format(he))
sys.exit(1)<|fim▁end|>
|
# Software is furnished to do so, subject to the following conditions:
|
<|file_name|>euler.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The CGMath Developers. For a full listing of the authors,
// refer to the Cargo.toml file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use num_traits::cast;
#[cfg(feature = "rand")]
use rand::{
distributions::{Distribution, Standard},
Rng,
};
use structure::*;
use angle::Rad;
use approx;
#[cfg(feature = "mint")]
use mint;
use num::BaseFloat;
use quaternion::Quaternion;
/// A set of [Euler angles] representing a rotation in three-dimensional space.
///
/// This type is marked as `#[repr(C)]`.
///
/// The axis rotation sequence is XYZ. That is, the rotation is first around
/// the X axis, then the Y axis, and lastly the Z axis (using intrinsic
/// rotations). Since all three rotation axes are used, the angles are
/// Tait–Bryan angles rather than proper Euler angles.<|fim▁hole|>/// - y: [-pi/2, pi/2]
/// - z: [-pi, pi]
///
/// # Defining rotations using Euler angles
///
/// Note that while [Euler angles] are intuitive to define, they are prone to
/// [gimbal lock] and are challenging to interpolate between. Instead we
/// recommend that you convert them to a more robust representation, such as a
/// quaternion or a rotation matrix. To this end, `From<Euler<A>>` conversions
/// are provided for the following types:
///
/// - [`Basis3`](struct.Basis3.html)
/// - [`Matrix3`](struct.Matrix3.html)
/// - [`Matrix4`](struct.Matrix4.html)
/// - [`Quaternion`](struct.Quaternion.html)
///
/// For example, to define a quaternion that applies the following:
///
/// 1. a 90° rotation around the _x_ axis
/// 2. a 45° rotation around the _y_ axis
/// 3. a 15° rotation around the _z_ axis
///
/// you can use the following code:
///
/// ```
/// use cgmath::{Deg, Euler, Quaternion};
///
/// let rotation = Quaternion::from(Euler {
/// x: Deg(90.0),
/// y: Deg(45.0),
/// z: Deg(15.0),
/// });
/// ```
///
/// [Euler angles]: https://en.wikipedia.org/wiki/Euler_angles
/// [gimbal lock]: https://en.wikipedia.org/wiki/Gimbal_lock#Gimbal_lock_in_applied_mathematics
/// [convert]: #defining-rotations-using-euler-angles
#[repr(C)]
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct Euler<A> {
    /// The angle to apply around the _x_ axis. Also known as the _pitch_.
    pub x: A,
    /// The angle to apply around the _y_ axis. Also known as the _yaw_.
    pub y: A,
    /// The angle to apply around the _z_ axis. Also known as the _roll_.
pub z: A,
}
impl<A> Euler<A> {
/// Construct a set of euler angles.
///
/// # Arguments
///
    /// * `x` - The angle to apply around the _x_ axis. Also known as the _pitch_.
    /// * `y` - The angle to apply around the _y_ axis. Also known as the _yaw_.
    /// * `z` - The angle to apply around the _z_ axis. Also known as the _roll_.
pub const fn new(x: A, y: A, z: A) -> Euler<A> {
Euler { x, y, z }
}
}
impl<S: BaseFloat> From<Quaternion<S>> for Euler<Rad<S>> {
fn from(src: Quaternion<S>) -> Euler<Rad<S>> {
let sig: S = cast(0.499).unwrap();
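        // `sig` is the gimbal-lock threshold: at the poles the `test` term computed below
        // approaches +/- 0.5 * unit, so anything past 0.499 is treated as a pole case.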
let two: S = cast(2).unwrap();
let one: S = cast(1).unwrap();
let (qw, qx, qy, qz) = (src.s, src.v.x, src.v.y, src.v.z);
let (sqw, sqx, sqy, sqz) = (qw * qw, qx * qx, qy * qy, qz * qz);
let unit = sqx + sqz + sqy + sqw;
let test = qx * qz + qy * qw;
// We set x to zero and z to the value, but the other way would work too.
if test > sig * unit {
// x + z = 2 * atan(x / w)
Euler {
x: Rad::zero(),
y: Rad::turn_div_4(),
z: Rad::atan2(qx, qw) * two,
}
} else if test < -sig * unit {
// x - z = 2 * atan(x / w)
Euler {
x: Rad::zero(),
y: -Rad::turn_div_4(),
z: -Rad::atan2(qx, qw) * two,
}
} else {
// Using the quat-to-matrix equation from either
// http://www.euclideanspace.com/maths/geometry/rotations/conversions/quaternionToMatrix/index.htm
// or equation 15 on page 7 of
// http://ntrs.nasa.gov/archive/nasa/casi.ntrs.nasa.gov/19770024290.pdf
// to fill in the equations on page A-2 of the NASA document gives the below.
Euler {
x: Rad::atan2(two * (-qy * qz + qx * qw), one - two * (sqx + sqy)),
y: Rad::asin(two * (qx * qz + qy * qw)),
z: Rad::atan2(two * (-qx * qy + qz * qw), one - two * (sqy + sqz)),
}
}
}
}
impl<A: Angle> approx::AbsDiffEq for Euler<A> {
type Epsilon = A::Epsilon;
#[inline]
fn default_epsilon() -> A::Epsilon {
A::default_epsilon()
}
#[inline]
fn abs_diff_eq(&self, other: &Self, epsilon: A::Epsilon) -> bool {
A::abs_diff_eq(&self.x, &other.x, epsilon)
&& A::abs_diff_eq(&self.y, &other.y, epsilon)
&& A::abs_diff_eq(&self.z, &other.z, epsilon)
}
}
impl<A: Angle> approx::RelativeEq for Euler<A> {
#[inline]
fn default_max_relative() -> A::Epsilon {
A::default_max_relative()
}
#[inline]
fn relative_eq(&self, other: &Self, epsilon: A::Epsilon, max_relative: A::Epsilon) -> bool {
A::relative_eq(&self.x, &other.x, epsilon, max_relative)
&& A::relative_eq(&self.y, &other.y, epsilon, max_relative)
&& A::relative_eq(&self.z, &other.z, epsilon, max_relative)
}
}
impl<A: Angle> approx::UlpsEq for Euler<A> {
#[inline]
fn default_max_ulps() -> u32 {
A::default_max_ulps()
}
#[inline]
fn ulps_eq(&self, other: &Self, epsilon: A::Epsilon, max_ulps: u32) -> bool {
A::ulps_eq(&self.x, &other.x, epsilon, max_ulps)
&& A::ulps_eq(&self.y, &other.y, epsilon, max_ulps)
&& A::ulps_eq(&self.z, &other.z, epsilon, max_ulps)
}
}
#[cfg(feature = "rand")]
impl<A> Distribution<Euler<A>> for Standard
where
Standard: Distribution<A>,
A: Angle,
{
fn sample<R: Rng + ?Sized>(&self, rng: &mut R) -> Euler<A> {
Euler {
x: rng.gen(),
y: rng.gen(),
z: rng.gen(),
}
}
}
#[cfg(feature = "mint")]
type MintEuler<S> = mint::EulerAngles<S, mint::IntraXYZ>;
#[cfg(feature = "mint")]
impl<S, A: Angle + From<S>> From<MintEuler<S>> for Euler<A> {
fn from(mint: MintEuler<S>) -> Self {
Euler {
x: mint.a.into(),
y: mint.b.into(),
z: mint.c.into(),
}
}
}
#[cfg(feature = "mint")]
impl<S: Clone, A: Angle + Into<S>> From<Euler<A>> for MintEuler<S> {
fn from(v: Euler<A>) -> Self {
MintEuler::from([v.x.into(), v.y.into(), v.z.into()])
}
}<|fim▁end|>
|
///
/// # Ranges
///
/// - x: [-pi, pi]
|
<|file_name|>InputHandler.js<|end_file_name|><|fim▁begin|>function InputHandler(viewport) {
var self = this;
self.pressedKeys = {};
self.mouseX = 0;
self.mouseY = 0;
self.mouseDownX = 0;
self.mouseDownY = 0;
self.mouseMoved = false;
self.mouseDown = false;
self.mouseButton = 0; // 1 = left | 2 = middle | 3 = right
self.viewport = viewport;
var viewportElem = viewport.get(0);
viewportElem.ondrop = function(event) { self.onDrop(event); };
viewportElem.ondragover = function(event) { event.preventDefault(); };
viewportElem.onclick = function(event) { self.onClick(event); };
viewportElem.onmousedown = function(event) { self.onMouseDown(event); };
window.onmouseup = function(event) { self.onMouseUp(event); };
window.onkeydown = function(event) { self.onKeyDown(event); };
window.onkeyup = function(event) { self.onKeyUp(event); };
window.onmousemove = function(event) { self.onMouseMove(event); };
viewportElem.oncontextmenu = function(event) { event.preventDefault();};
viewport.mousewheel(function(event) { self.onScroll(event); });
}
InputHandler.prototype.update = function(event) {
for(var key in this.pressedKeys) {
this.target.onKeyDown(key);
}
};
InputHandler.prototype.onKeyUp = function(event) {
delete this.pressedKeys[event.keyCode];
};
InputHandler.prototype.onKeyDown = function(event) {
// Avoid capturing key events from input boxes and text areas
var tag = event.target.tagName.toLowerCase();
if (tag == 'input' || tag == 'textarea') return;
var keyCode = event.keyCode;<|fim▁hole|>};
InputHandler.prototype.isKeyDown = function(key) {
var isDown = this.pressedKeys[key] !== undefined;
return isDown;
};
// Return mouse position in [0,1] range relative to bottom-left of viewport (screen space)
InputHandler.prototype.convertToScreenSpace = function(pageX, pageY) {
var left = this.viewport.offset().left;
var top = this.viewport.offset().top;
var width = this.viewport.innerWidth();
var height = this.viewport.innerHeight();
var x = (pageX - left)/width;
var y = -(pageY - top)/height + 1.0;
return [x,y];
};
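// Worked example (illustrative): with the viewport at page offset (100, 50) and sized 800x600,
// a pointer at page (500, 350) maps to x = (500 - 100) / 800 = 0.5 and
// y = -(350 - 50) / 600 + 1 = 0.5, i.e. the centre of the viewport.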
InputHandler.prototype.onDrop = function(event) {
event.preventDefault();
var mouse = this.convertToScreenSpace(event.pageX, event.pageY);
var assetName = event.dataTransfer.getData("text");
this.target.dropAsset(assetName, mouse[0], mouse[1]);
};
InputHandler.prototype.onClick = function(event) {
if(this.mouseMoved) return;
var screenSpace = this.convertToScreenSpace(event.pageX, event.pageY);
this.target.onClick(screenSpace[0], screenSpace[1]);
};
InputHandler.prototype.onMouseDown = function(event) {
this.viewport.focus();
if(this.mouseButton > 0) return; // Don't process a mouse down from a different button until the current one is done
this.mouseButton = event.which;
this.mouseDown = true;
var mouseX = event.pageX;
var mouseY = event.pageY;
this.mouseDownX = mouseX;
this.mouseDownY = mouseY;
this.mouseMoved = false;
var screenSpace = this.convertToScreenSpace(mouseX, mouseY);
this.target.onMouseDown(screenSpace[0], screenSpace[1], this.mouseButton)
};
InputHandler.prototype.onMouseUp = function(event) {
this.mouseDown = false;
this.mouseButton = 0;
};
InputHandler.prototype.onMouseMove = function(event) {
var mouseX = event.pageX;
var mouseY = event.pageY;
// Ignore click if mouse moved too much between mouse down and mouse click
if(Math.abs(this.mouseDownX - mouseX) > 3 || Math.abs(this.mouseDownY - mouseY) > 3) {
this.mouseMoved = true;
}
if(this.mouseDown) {
event.preventDefault();
}
var mouseMoveX = mouseX - this.mouseX;
var mouseMoveY = mouseY - this.mouseY;
this.mouseX = mouseX;
this.mouseY = mouseY;
var screenSpace = this.convertToScreenSpace(mouseX, mouseY);
this.target.onMouseMove(screenSpace[0], screenSpace[1], mouseMoveX, mouseMoveY, this.mouseButton);
};
InputHandler.prototype.onScroll = function(event) {
this.target.onScroll(event.deltaY);
};<|fim▁end|>
|
this.pressedKeys[keyCode] = true;
var ctrl = event.ctrlKey;
this.target.onKeyPress(keyCode, ctrl);
|
<|file_name|>rows.rs<|end_file_name|><|fim▁begin|>use crate::{
Dao,
Value,
};
use serde_derive::{
Deserialize,
Serialize,
};
use std::slice;
/// use this to store data retrieved from the database
/// This is also slimmer than Vec<Dao> when serialized<|fim▁hole|> /// can be optionally set, indicates how many total rows are there in the table
pub count: Option<usize>,
}
impl Rows {
pub fn empty() -> Self { Rows::new(vec![]) }
pub fn new(columns: Vec<String>) -> Self {
Rows {
columns,
data: vec![],
count: None,
}
}
pub fn push(&mut self, row: Vec<Value>) { self.data.push(row) }
/// Returns an iterator over the `Row`s.
pub fn iter(&self) -> Iter {
Iter {
columns: self.columns.clone(),
iter: self.data.iter(),
}
}
}
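// Illustrative usage only (mirrors the tests at the bottom of this file): build a result
// set column-wise, then read it back row-by-row as `Dao`s.
//
//     let mut rows = Rows::new(vec!["id".to_string(), "username".to_string()]);
//     rows.push(vec![1.into(), "ivanceras".into()]);
//     let first = rows.iter().next().unwrap(); // a `Dao` with "id" and "username" filled in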
/// An iterator over `Row`s.
pub struct Iter<'a> {
columns: Vec<String>,
iter: slice::Iter<'a, Vec<Value>>,
}
impl<'a> Iterator for Iter<'a> {
type Item = Dao;
fn next(&mut self) -> Option<Dao> {
let next_row = self.iter.next();
if let Some(row) = next_row {
if !row.is_empty() {
let mut dao = Dao::new();
for (i, column) in self.columns.iter().enumerate() {
if let Some(value) = row.get(i) {
dao.insert_value(column, value);
}
}
Some(dao)
} else {
None
}
} else {
None
}
}
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
}
impl<'a> ExactSizeIterator for Iter<'a> {}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn iteration_count() {
let columns = vec!["id".to_string(), "username".to_string()];
let data: Vec<Vec<Value>> = vec![vec![1.into(), "ivanceras".into()]];
let rows = Rows {
columns,
data,
count: None,
};
assert_eq!(1, rows.iter().count());
}
#[test]
fn iteration_count2() {
let columns = vec!["id".to_string(), "username".to_string()];
let data: Vec<Vec<Value>> = vec![vec![1.into(), "ivanceras".into()], vec![
2.into(),
"lee".into(),
]];
let rows = Rows {
columns,
data,
count: None,
};
assert_eq!(2, rows.iter().count());
}
#[test]
fn dao() {
let columns = vec!["id".to_string(), "username".to_string()];
let data: Vec<Vec<Value>> = vec![vec![1.into(), "ivanceras".into()]];
let rows = Rows {
columns,
data,
count: None,
};
let mut dao = Dao::new();
dao.insert("id", 1);
dao.insert("username", "ivanceras");
assert_eq!(dao, rows.iter().next().unwrap());
}
#[test]
fn dao2() {
let columns = vec!["id".to_string(), "username".to_string()];
let data: Vec<Vec<Value>> = vec![vec![1.into(), "ivanceras".into()], vec![
2.into(),
"lee".into(),
]];
let rows = Rows {
columns,
data,
count: None,
};
let mut iter = rows.iter();
let mut dao = Dao::new();
dao.insert("id", 1);
dao.insert("username", "ivanceras");
assert_eq!(dao, iter.next().unwrap());
let mut dao2 = Dao::new();
dao2.insert("id", 2);
dao2.insert("username", "lee");
assert_eq!(dao2, iter.next().unwrap());
}
}<|fim▁end|>
|
#[derive(Debug, PartialEq, Serialize, Deserialize, Clone)]
pub struct Rows {
pub columns: Vec<String>,
pub data: Vec<Vec<Value>>,
|
<|file_name|>Classes.java<|end_file_name|><|fim▁begin|>package lesson.types;
public class Classes {
public static void main(String[] args) {
JustClass one = new JustClass();
JustClass two = new JustClass(123, "sdf");
System.out.println(one);
System.out.println(two);
}
}
class JustClass {
private int number;
private String name;
public JustClass() { }
public JustClass(int number, String name) {
this.number = number;
this.name = name;
}
<|fim▁hole|> }
}<|fim▁end|>
|
@Override
public String toString() {
return String
.format("JustClass {%s, %d}", name,number);
|
<|file_name|>mymedialite.py<|end_file_name|><|fim▁begin|>##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by<|fim▁hole|># MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for MyMediaLite, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
from distutils.version import LooseVersion
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.run import run_cmd
class EB_MyMediaLite(ConfigureMake):
"""Support for building/installing MyMediaLite."""
def configure_step(self):
"""Custom configure step for MyMediaLite, using "make CONFIGURE_OPTIONS='...' configure"."""
if LooseVersion(self.version) < LooseVersion('3'):
cmd = "make CONFIGURE_OPTIONS='--prefix=%s' configure" % self.installdir
run_cmd(cmd, log_all=True, simple=True)
else:
self.cfg.update('installopts', "PREFIX=%s" % self.installdir)
def build_step(self):
"""Custom build step for MyMediaLite, using 'make all' in 'src' directory."""
cmd = "cd src && make all && cd .."
run_cmd(cmd, log_all=True, simple=True)
def sanity_check_step(self):
"""Custom sanity check for MyMediaLite."""
if LooseVersion(self.version) < LooseVersion('3'):
bin_files = ["bin/%s_prediction" % x for x in ['item', 'mapping_item', 'mapping_rating', 'rating']]
else:
bin_files = ["bin/item_recommendation", "bin/rating_based_ranking", "bin/rating_prediction"]
custom_paths = {
'files': bin_files,
'dirs': ["lib/mymedialite"],
}
super(EB_MyMediaLite, self).sanity_check_step(custom_paths=custom_paths)<|fim▁end|>
|
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
<|file_name|>Strategies.py<|end_file_name|><|fim▁begin|>from .. import NogginConstants
from . import PBConstants
from . import Formations
def sReady(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_READY)
Formations.fReady(team, workingPlay)
def sNoFieldPlayers(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_NO_FIELD_PLAYERS)
Formations.fNoFieldPlayers(team, workingPlay)
def sOneField(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_ONE_FIELD_PLAYER)
# no kickoff formation- would be identical to fOneField
# Formation for ball in our goal box
if shouldUseDubD(team):
Formations.fOneDubD(team, workingPlay)
elif useFinder(team):
Formations.fFinder(team, workingPlay)
else:
Formations.fOneField(team, workingPlay)
def sTwoField(team, workingPlay):
'''
This is our standard strategy. Based around the 2008.
'''
workingPlay.setStrategy(PBConstants.S_TWO_FIELD_PLAYERS)
# Kickoff Formations
if useKickoffFormation(team):
Formations.fKickoff(team, workingPlay)
# Formation for ball in our goal box
elif shouldUseDubD(team):
Formations.fTwoDubD(team, workingPlay)
# ball hasn't been seen by me or teammates in a while
elif useFinder(team):
Formations.fFinder(team, workingPlay)
else:
# Keep a defender and a chaser
Formations.fDefensiveTwoField(team, workingPlay)
def sThreeField(team, workingPlay):
'''
This is our pulled goalie strategy.
'''
workingPlay.setStrategy(PBConstants.S_THREE_FIELD_PLAYERS)
# Kickoff Formations
if useKickoffFormation(team):
Formations.fKickoff(team, workingPlay)
# Formation for ball in our goal box
elif shouldUseDubD(team):
Formations.fThreeDubD(team, workingPlay)
# ball hasn't been seen by me or teammates in a while<|fim▁hole|>
# Standard spread formation
else:
Formations.fThreeField(team, workingPlay)
def sTwoZone(team, workingPlay):
"""
We attempt to keep one robot forward and one back
They become chaser if the ball is closer to them
"""
sTwoField(team, workingPlay)
def sWin(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_WIN)
# Kickoff Formations
if useKickoffFormation(team):
Formations.fKickoff(team,workingPlay)
# Formation for ball in our goal box
elif shouldUseDubD(team):
Formations.fTwoDubD(team, workingPlay)
# ball hasn't been seen by me or teammates in a while
elif useFinder(team):
Formations.fFinder(team, workingPlay)
# Move the defender forward if the ball is close enough to opp goal, then become a middie
elif team.brain.ball.x > PBConstants.S_MIDDIE_DEFENDER_THRESH:
Formations.fNeutralDefenseTwoField(team, workingPlay)
else:
Formations.fDefensiveTwoField(team, workingPlay)
# Add strategies for testing various roles
def sTestDefender(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_TEST_DEFENDER)
Formations.fTestDefender(team, workingPlay)
def sTestOffender(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_TEST_OFFENDER)
Formations.fTestOffender(team, workingPlay)
def sTestMiddie(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_TEST_MIDDIE)
Formations.fTestMiddie(team, workingPlay)
def sTestChaser(team, workingPlay):
workingPlay.setStrategy(PBConstants.S_TEST_CHASER)
# Game Ready Setup
if team.brain.gameController.currentState == 'gameReady' or\
team.brain.gameController.currentState =='gameSet':
# team is kicking off
Formations.fReady(team, workingPlay)
else:
Formations.fTestChaser(team, workingPlay)
#not sure this is the best place for these yet...
def useKickoffFormation(team):
if (team.brain.gameController.timeSincePlay() <
PBConstants.KICKOFF_FORMATION_TIME):
return True
else:
return False
def useFinder(team):
if (PBConstants.USE_FINDER and
team.brain.ball.timeSinceSeen() >
PBConstants.FINDER_TIME_THRESH):
return True
else:
return False
def shouldUseDubD(team):
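    # Dub-D applies only when the ball is inside our own goal box, or just outside it while
    # the goalie is already playing the chaser role (the two clauses of the return below).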
if not PBConstants.USE_DUB_D:
return False
ballY = team.brain.ball.y
ballX = team.brain.ball.x
goalie = team.teammates[0]
return (
( ballY > NogginConstants.MY_GOALBOX_BOTTOM_Y + 5. and
ballY < NogginConstants.MY_GOALBOX_TOP_Y - 5. and
ballX < NogginConstants.MY_GOALBOX_RIGHT_X - 5.) or
( ballY > NogginConstants.MY_GOALBOX_TOP_Y - 5. and
ballY < NogginConstants.MY_GOALBOX_BOTTOM_Y + 5. and
ballX < NogginConstants.MY_GOALBOX_RIGHT_X + 5. and
goalie.isTeammateRole(PBConstants.CHASER) )
)<|fim▁end|>
|
elif useFinder(team):
Formations.fFinder(team, workingPlay)
|
<|file_name|>_mock-principal.service.ts<|end_file_name|><|fim▁begin|><%#
Copyright 2013-2017 the original author or authors.
This file is part of the JHipster project, see https://jhipster.github.io/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");<|fim▁hole|> You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
import { SpyObject } from './spyobject';
import { Principal } from '../../../../main/webapp/app/shared/auth/principal.service';
import Spy = jasmine.Spy;
export class MockPrincipal extends SpyObject {
identitySpy: Spy;
fakeResponse: any;
constructor() {
super(Principal);
this.fakeResponse = {};
this.identitySpy = this.spy('identity').andReturn(Promise.resolve(this.fakeResponse));
}
setResponse(json: any): void {
this.fakeResponse = json;
}
}<|fim▁end|>
|
you may not use this file except in compliance with the License.
|
<|file_name|>BaseDao.java<|end_file_name|><|fim▁begin|>package com.starfarers.dao;
import java.util.List;
public interface BaseDao<T> {
public void create(T entity);
public T save(T entity);
public void save(List<T> entities);
public void remove(T entity);
public T find(Integer id);
<|fim▁hole|>}<|fim▁end|>
|
public List<T> findAll();
public <P> T findBy(String attribute, P parameter);
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>mod sql_safe;
pub use self::sql_safe::{SqlSafe, SQL_CHECK};<|fim▁end|>
|
//! This is documentation for the `mylib::sql::traits` module.
//!
//! # Examples
|
<|file_name|>ProxyConfig.java<|end_file_name|><|fim▁begin|>/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.proxy;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.configuration.validation.FileExists;
import javax.validation.constraints.NotNull;
import java.io.File;
import java.net.URI;
<|fim▁hole|> private File sharedSecretFile;
@NotNull
public URI getUri()
{
return uri;
}
@Config("proxy.uri")
@ConfigDescription("URI of the remote Trino server")
public ProxyConfig setUri(URI uri)
{
this.uri = uri;
return this;
}
@NotNull
@FileExists
public File getSharedSecretFile()
{
return sharedSecretFile;
}
@Config("proxy.shared-secret-file")
@ConfigDescription("Shared secret file used for authenticating URIs")
public ProxyConfig setSharedSecretFile(File sharedSecretFile)
{
this.sharedSecretFile = sharedSecretFile;
return this;
}
}<|fim▁end|>
|
public class ProxyConfig
{
private URI uri;
|
<|file_name|>errors.cc<|end_file_name|><|fim▁begin|>/** File errors.cc author Vladislav Tcendrovskii
* Copyright (c) 2013
* This source subjected to the Gnu General Public License v3 or later (see LICENSE)
* All other rights reserved
* THIS CODE AND INFORMATION ARE PROVIDED "AS IS" WITHOUT WARRANTY
* OF ANY KIND, EITHER EXPRESSED OR IMPLIED,
* INCLUDING BUT NOT LIMITED TO THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND/OR FITNESS FOR A PARTICULAR PURPOSE.
* */
#include "errors.h"
std::string error_str(int errnum)<|fim▁hole|>{
std::string ans;
switch(errnum)
{
case ENO:
ans = "No error";
break;
case EZERO:
ans = "Division by zero";
break;
case EDIM:
ans = "Dimension mismatch";
break;
case EMEM:
ans = "Memory allocation fail";
break;
case ETYPE:
ans = "Wrong object type/form";
break;
case EIND:
ans = "Index out of range";
break;
default:
ans = "Error number " + errnum;
break;
}
return ans;
}<|fim▁end|>
| |
<|file_name|>def_build_graph.py<|end_file_name|><|fim▁begin|>from networkx import DiGraph
from networkx.readwrite import json_graph
import cantera as ct
import numpy as np
import json
#from src.core.def_tools import *
import os
__author__ = 'Xiang Gao'
""" ----------------------------------------------
construction of the element flux graph
-----------------------------------------------"""
def build_flux_graph(soln, raw, traced_element, path_save=None, overwrite=False, i0=0, i1='eq', constV=False):
"""
:param mechanism: type = dict, keys include "species", "reaction", "element", etc<|fim▁hole|> :param i0: type = int, specifying the starting point of the considered interval of the raw data
:param i1: type = int or str, specifying the ending point of the considered interval of the raw data
:return flux graph: type = networkx object, will be also saved as a .json file,
"""
element = soln.element_names
species = soln.species
reaction = soln.reaction
n_rxn = soln.n_reactions
""" --------------------------------
check if results already exist, if so, load
-------------------------------- """
if path_save is not None:
if overwrite is False:
try:
data = json.load(open(path_save, 'r'))
flux_graph = json_graph.node_link_graph(data)
return flux_graph
except IOError:
pass
""" --------------------------------
if not, then compute, and save
-------------------------------- """
# ---------------------------------------------
# check if traced_element is legal
if traced_element not in element:
raise('traced element ' + traced_element + ' is not listed in mechanism')
# ---------------------------------------------
# find the reaction rate during the considered interval
# unit will be converted to mole/sec
rr = np.reshape(raw['net_reaction_rate'][i0,:],[n_rxn,1])
flux_graph = DiGraph()
# -------------------------------------
# adding edge from reactions
# one edge may contribute from multiple reactions, the list of the contributors will be stored in edge['member']
# note though in .cti id_rxn starts from 1, in soln.reaction, id_rxn starts from 0
for id_rxn in range(n_rxn):
# sp_mu is a dict, where key is species, val is net stoichiometric coefficient
sp_mu = reaction(id_rxn).products
for sp in reaction(id_rxn).reactants.keys():
mu = reaction(id_rxn).reactants[sp]
if sp in sp_mu.keys():
sp_mu[sp] -= mu
else:
sp_mu[sp] = -mu
# -----------------------
# produced is a dict, where key is sp, val is number of traced atoms
# being transferred when this sp is produced
produced = {}
consumed = {}
for sp in sp_mu.keys():
atoms = species(sp).composition
if traced_element in atoms.keys():
n = int(sp_mu[sp] * atoms[traced_element] * np.sign(rr[id_rxn]))
if n > 0:
produced[sp] = abs(n)
elif n < 0:
consumed[sp] = abs(n)
# -----------------------
# consider this reaction only when traced element is transferred
# note "if bool(consumed)" works the same way
if bool(produced):
n_sum = sum(produced.values())
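            # Flux split used below (standard element-flux attribution, spelled out for clarity):
            # the traced atoms leaving `source` go to each `target` in proportion to the atoms
            # that product receives, i.e.
            #     A(source -> target) = consumed[source] * produced[target] / sum(produced.values())
            # scaled by the absolute net reaction rate of this reaction.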
for target in produced.keys():
for source in consumed.keys():
n_i2j = 1.0 * produced[target] * consumed[source] / n_sum
# note that the direction (source-->target) is already assured
# therefore we use abs(RR) here
dw = float(n_i2j * abs(rr[id_rxn]))
try:
flux_graph[source][target]['flux'] += dw
except KeyError:
# if this edge doesn't exist, create it
flux_graph.add_edge(source, target)
flux_graph[source][target]['flux'] = dw
flux_graph[source][target]['member'] = {}
flux_graph[source][target]['member'][str(id_rxn)] = dw
flux_graph[source][target]['1/flux'] = 1.0 / flux_graph[source][target]['flux']
# -------------------------------------
# save the graph using json, which is fast, and human-readable
data = json_graph.node_link_data(flux_graph)
json.dump(data, open(path_save, 'w'))
#print 'graph saved as',path_save
return flux_graph<|fim▁end|>
|
:param raw: type = dict, keys include "mole_fraction", "net_reaction_rate", etc
:param traced_element: type = str
|
<|file_name|>authentication.service.ts<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file<|fim▁hole|> * regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import {EventEmitter, Injectable} from '@angular/core';
import {ArchivaRequestService} from "./archiva-request.service";
import {AccessToken} from "../model/access-token";
import {environment} from "../../environments/environment";
import {ErrorMessage} from "../model/error-message";
import {ErrorResult} from "../model/error-result";
import {HttpErrorResponse} from "@angular/common/http";
import {UserService} from "./user.service";
import {UserInfo} from "../model/user-info";
/**
* The AuthenticationService handles user authentication and stores user data after successful login
*/
@Injectable({
providedIn: 'root'
})
export class AuthenticationService {
authenticated: boolean;
authenticating: boolean;
/**
* The LoginEvent is emitted, when a successful login happened. And the corresponding user info was retrieved.
*/
public LoginEvent: EventEmitter<UserInfo> = new EventEmitter<UserInfo>();
/**
* The LogoutEvent is emitted, when the user has been logged out.
*/
public LogoutEvent: EventEmitter<any> = new EventEmitter<any>();
constructor(private rest: ArchivaRequestService,
private userService: UserService) {
this.authenticated = false;
this.LoginEvent.subscribe((info)=>{
this.authenticating=false;
})
this.LogoutEvent.subscribe(()=>{
this.authenticating=false;
})
this.restoreLoginData();
}
private restoreLoginData() {
console.debug("Restoring login data");
let accessToken = localStorage.getItem("access_token");
if (accessToken != null) {
let expirationDate = localStorage.getItem("token_expire");
if (expirationDate != null) {
let expDate = new Date(expirationDate);
let currentDate = new Date();
if (currentDate < expDate) {
console.debug("Retrieving user information");
this.authenticating=true;
let observer = this.userService.retrieveUserInfo();
observer.subscribe({
next: (userInfo: UserInfo) => {
if (userInfo != null) {
let permObserver = this.userService.retrievePermissionInfo();
permObserver.subscribe({
next: () => {
this.authenticated = true;
this.LoginEvent.emit(userInfo)
},
error: (err) => {
console.debug("Error retrieving perms: " + JSON.stringify(err));
}
}
)
}
},
error: (err: HttpErrorResponse) => {
console.debug("Error retrieving user info: " + JSON.stringify(err));
this.logout();
},
complete: () => {
this.authenticating=false;
}
}
);
} else {
this.logout();
}
} else {
this.logout();
}
}
}
/**
* Tries to login by sending the login data to the REST service. If the login was successful the access
* and refresh token is stored locally.
*
* @param userid The user id for the login
* @param password The password
* @param resultHandler A result handler that is executed, after calling the login service
*/
login(userid: string, password: string, resultHandler: (n: string, err?: ErrorMessage[]) => void) {
this.authenticating=true;
const data = {
'grant_type': 'authorization_code',
'client_id': environment.application.client_id,
'user_id': userid, 'password': password
};
let authObserver = this.rest.executeRestCall<AccessToken>('post', 'redback', 'auth/authenticate', data);
let tokenObserver = {
next: (x: AccessToken) => {
localStorage.setItem("access_token", x.access_token);
localStorage.setItem("refresh_token", x.refresh_token);
if (x.expires_in != null) {
let dt = new Date();
dt.setSeconds(dt.getSeconds() + x.expires_in);
localStorage.setItem("token_expire", dt.toISOString());
}
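                // restoreLoginData() compares this stored expiry with the current time on the next
                // startup and simply calls logout() if the token has already expired.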
let userObserver = this.userService.retrieveUserInfo();
this.authenticated = true;
userObserver.subscribe(userInfo => {
if (userInfo != null) {
let permObserver = this.userService.retrievePermissionInfo();
permObserver.subscribe((perms) => {
this.LoginEvent.emit(userInfo);
}
)
}
});
resultHandler("OK");
},
error: (err: HttpErrorResponse) => {
this.authenticating = false;
console.log("Error " + (JSON.stringify(err)));
let result = err.error as ErrorResult
if (result.error_messages != null) {
for (let msg of result.error_messages) {
console.debug('Observer got an error: ' + msg.error_key)
}
resultHandler("ERROR", result.error_messages);
} else {
resultHandler("ERROR", null);
}
},
complete: () => {
this.authenticating = false;
}
};
authObserver.subscribe(tokenObserver)
}
/**
* Resets the stored user data
*/
logout() {
localStorage.removeItem("access_token");
localStorage.removeItem("refresh_token");
localStorage.removeItem("token_expire");
this.authenticated = false;
this.userService.resetUser();
this.rest.resetToken();
this.LogoutEvent.emit();
}
}<|fim▁end|>
|
* distributed with this work for additional information
|
<|file_name|>show_tests.py<|end_file_name|><|fim▁begin|># coding=utf-8
# This file is part of SickChill.
#
# URL: https://sickchill.github.io
# Git: https://github.com/SickChill/SickChill.git
#
# SickChill is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickChill is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickChill. If not, see <http://www.gnu.org/licenses/>.
"""
Test shows
"""
# pylint: disable=line-too-long
from __future__ import print_function, unicode_literals
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../../lib')))
sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), '../../..')))
import sickbeard
import six
from sickbeard.common import Quality
from sickbeard.tv import TVShow
from sickchill.helper.exceptions import MultipleShowObjectsException
from sickchill.show.Show import Show
class ShowTests(unittest.TestCase):
"""
Test shows
"""
def test_find(self):
"""
Test find tv shows by indexer_id
"""
sickbeard.QUALITY_DEFAULT = Quality.FULLHDTV
sickbeard.showList = []
show123 = TestTVShow(0, 123)
show456 = TestTVShow(0, 456)
show789 = TestTVShow(0, 789)
shows = [show123, show456, show789]
shows_duplicate = shows + shows
test_cases = {
(False, None): None,
(False, ''): None,
(False, '123'): None,
(False, 123): None,
(False, 12.3): None,
(True, None): None,
(True, ''): None,
(True, '123'): None,
(True, 123): show123,
(True, 12.3): None,
(True, 456): show456,
(True, 789): show789,
}
unicode_test_cases = {<|fim▁hole|> (True, '123'): None,
}
for tests in test_cases, unicode_test_cases:
for ((use_shows, indexer_id), result) in six.iteritems(tests):
if use_shows:
self.assertEqual(Show.find(shows, indexer_id), result)
else:
self.assertEqual(Show.find(None, indexer_id), result)
with self.assertRaises(MultipleShowObjectsException):
Show.find(shows_duplicate, 456)
def test_validate_indexer_id(self):
"""
Tests if the indexer_id is valid and if so if it returns the right show
"""
sickbeard.QUALITY_DEFAULT = Quality.FULLHDTV
sickbeard.showList = []
show123 = TestTVShow(0, 123)
show456 = TestTVShow(0, 456)
show789 = TestTVShow(0, 789)
sickbeard.showList = [
show123,
show456,
show789,
]
invalid_show_id = ('Invalid show ID', None)
indexer_id_list = [
None, '', '', '123', '123', '456', '456', '789', '789', 123, 456, 789, ['123', '456'], ['123', '456'],
[123, 456]
]
results_list = [
invalid_show_id, invalid_show_id, invalid_show_id, (None, show123), (None, show123), (None, show456),
(None, show456), (None, show789), (None, show789), (None, show123), (None, show456), (None, show789),
invalid_show_id, invalid_show_id, invalid_show_id
]
self.assertEqual(
len(indexer_id_list), len(results_list),
'Number of parameters ({0:d}) and results ({1:d}) does not match'.format(len(indexer_id_list), len(results_list))
)
for (index, indexer_id) in enumerate(indexer_id_list):
self.assertEqual(Show._validate_indexer_id(indexer_id), results_list[index]) # pylint: disable=protected-access
class TestTVShow(TVShow):
"""
A test `TVShow` object that does not need DB access.
"""
def __init__(self, indexer, indexer_id):
super(TestTVShow, self).__init__(indexer, indexer_id)
def loadFromDB(self):
"""
Override TVShow.loadFromDB to avoid DB access during testing
"""
pass
if __name__ == '__main__':
print('=====> Testing {0}'.format(__file__))
SUITE = unittest.TestLoader().loadTestsFromTestCase(ShowTests)
unittest.TextTestRunner(verbosity=2).run(SUITE)<|fim▁end|>
|
(False, ''): None,
(False, '123'): None,
(True, ''): None,
|
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>from django.contrib import admin
from Blogs.models import(Post, Comment)<|fim▁hole|>admin.site.register(Comment)<|fim▁end|>
|
admin.site.register(Post)
|
<|file_name|>rsh_driver.cpp<|end_file_name|><|fim▁begin|>/*
Copyright (C) 2011 Equinor ASA, Norway.
The file 'rsh_driver.c' is part of ERT - Ensemble based Reservoir Tool.
ERT is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
ERT is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License at <http://www.gnu.org/licenses/gpl.html>
for more details.
*/
#include <stdlib.h>
#include <signal.h>
#include <stdio.h>
#include <string.h>
#include <sys/socket.h>
#include <netdb.h>
#include <pthread.h>
#include <errno.h>
#include <ert/util/util.hpp>
#include <ert/res_util/arg_pack.hpp>
#include <ert/job_queue/queue_driver.hpp>
#include <ert/job_queue/rsh_driver.hpp>
struct rsh_job_struct {
UTIL_TYPE_ID_DECLARATION;
bool active; /* Means that it allocated - not really in use */
job_status_type status;
pthread_t run_thread;
const char * host_name; /* Currently not set */
char * run_path;
};
typedef struct {
char * host_name;
int max_running; /* How many can the host handle. */
int running; /* How many are currently running on the host (goverened by this driver instance that is). */
pthread_mutex_t host_mutex;
} rsh_host_type;
#define RSH_DRIVER_TYPE_ID 44963256
#define RSH_JOB_TYPE_ID 63256701
struct rsh_driver_struct {
UTIL_TYPE_ID_DECLARATION;
pthread_mutex_t submit_lock;
pthread_attr_t thread_attr;
char * rsh_command;
int num_hosts;
int last_host_index;
rsh_host_type **host_list;
hash_type *__host_hash; /* Stupid redundancy ... */
};
/******************************************************************/
static UTIL_SAFE_CAST_FUNCTION_CONST( rsh_driver , RSH_DRIVER_TYPE_ID )
static UTIL_SAFE_CAST_FUNCTION( rsh_driver , RSH_DRIVER_TYPE_ID )
static UTIL_SAFE_CAST_FUNCTION( rsh_job , RSH_JOB_TYPE_ID )
/**
If the host is for some reason not available, NULL should be
returned. Will also return NULL if some funny guy tries to allocate
with max_running <= 0.
*/
static rsh_host_type * rsh_host_alloc(const char * host_name , int max_running) {
if (max_running > 0) {
struct addrinfo * result;
if (getaddrinfo(host_name , NULL , NULL , &result) == 0) {
rsh_host_type * host = (rsh_host_type*)util_malloc(sizeof * host );
host->host_name = util_alloc_string_copy(host_name);
host->max_running = max_running;
host->running = 0;
pthread_mutex_init( &host->host_mutex , NULL );
freeaddrinfo( result );
return host;
} else {
fprintf(stderr,"** Warning: could not locate server: %s \n",host_name);
return NULL;
}
} else
return NULL;
}
static void rsh_host_free(rsh_host_type * rsh_host) {
free(rsh_host->host_name);
free(rsh_host);
}
static bool rsh_host_available(rsh_host_type * rsh_host) {
bool available;
pthread_mutex_lock( &rsh_host->host_mutex );
{
available = false;
if ((rsh_host->max_running - rsh_host->running) > 0) { // The host has free slots()
available = true;
rsh_host->running++;
}
}
pthread_mutex_unlock( &rsh_host->host_mutex );
return available;
}
static void rsh_host_submit_job(rsh_host_type * rsh_host , rsh_job_type * job, const char * rsh_cmd , const char * submit_cmd , int num_cpu , int job_argc , const char ** job_argv) {
/*
Observe that this job has already been added to the running jobs
in the rsh_host_available function.
*/
int argc = job_argc + 2;
const char ** argv = (const char**)util_malloc( argc * sizeof * argv );
argv[0] = rsh_host->host_name;
argv[1] = submit_cmd;
{
int iarg;
for (iarg = 0; iarg < job_argc; iarg++)
argv[iarg + 2] = job_argv[iarg];
}
util_spawn_blocking(rsh_cmd, argc, argv, NULL, NULL); /* This call is blocking. */
job->status = JOB_QUEUE_DONE;
pthread_mutex_lock( &rsh_host->host_mutex );
rsh_host->running--;
pthread_mutex_unlock( &rsh_host->host_mutex );
free( argv );
}
/*
static const char * rsh_host_get_hostname(const rsh_host_type * host) { return host->host_name; }
*/
static void * rsh_host_submit_job__(void * __arg_pack) {
arg_pack_type * arg_pack = arg_pack_safe_cast(__arg_pack);
char * rsh_cmd = (char *) arg_pack_iget_ptr(arg_pack , 0);
rsh_host_type * rsh_host = (rsh_host_type *)arg_pack_iget_ptr(arg_pack , 1);
char * submit_cmd = (char *) arg_pack_iget_ptr(arg_pack , 2);
int num_cpu = arg_pack_iget_int(arg_pack , 3);
int argc = arg_pack_iget_int(arg_pack , 4);
const char ** argv = (const char **) arg_pack_iget_ptr(arg_pack , 5);
rsh_job_type * job = (rsh_job_type*) arg_pack_iget_ptr(arg_pack , 6);
rsh_host_submit_job(rsh_host , job , rsh_cmd , submit_cmd , num_cpu , argc , argv);
arg_pack_free( arg_pack );
pthread_exit( NULL );
}
/*****************************************************************/
/*****************************************************************/
rsh_job_type * rsh_job_alloc(const char * run_path) {
rsh_job_type * job;
job = (rsh_job_type*)util_malloc(sizeof * job );
job->active = false;
job->status = JOB_QUEUE_WAITING;
job->run_path = util_alloc_string_copy(run_path);
UTIL_TYPE_ID_INIT( job , RSH_JOB_TYPE_ID );
return job;
}
void rsh_job_free(rsh_job_type * job) {
free(job->run_path);
free(job);
}
job_status_type rsh_driver_get_job_status(void * __driver , void * __job) {
if (__job == NULL)
/* The job has not been registered at all ... */
return JOB_QUEUE_NOT_ACTIVE;
else {
rsh_job_type * job = rsh_job_safe_cast( __job );
{
if (job->active == false) {
util_abort("%s: internal error - should not query status on inactive jobs \n" , __func__);
return JOB_QUEUE_NOT_ACTIVE; /* Dummy to shut up compiler */
} else
return job->status;
}
}
}
void rsh_driver_free_job( void * __job ) {
rsh_job_type * job = rsh_job_safe_cast( __job );
rsh_job_free(job);
}
void rsh_driver_kill_job(void * __driver ,void * __job) {
rsh_job_type * job = rsh_job_safe_cast( __job );
if (job->active)
pthread_cancel( job->run_thread );
rsh_job_free( job );
}
void * rsh_driver_submit_job(void * __driver,
const char * submit_cmd ,
int num_cpu , /* Ignored */
const char * run_path ,
const char * job_name ,
int argc,
const char ** argv ) {
rsh_driver_type * driver = rsh_driver_safe_cast( __driver );
rsh_job_type * job = NULL;
{
/*
command is freed in the start_routine() function
*/
pthread_mutex_lock( &driver->submit_lock );
{
rsh_host_type * host = NULL;
int ihost;
int host_index = 0;
if (driver->num_hosts == 0)
util_abort("%s: fatal error - no hosts added to the rsh driver.\n",__func__);
for (ihost = 0; ihost < driver->num_hosts; ihost++) {
host_index = (ihost + driver->last_host_index) % driver->num_hosts;
if (rsh_host_available(driver->host_list[host_index])) {
host = driver->host_list[host_index];
break;
}
}
driver->last_host_index = (host_index + 1) % driver->num_hosts;
if (host != NULL) {
/* A host is available */
arg_pack_type * arg_pack = arg_pack_alloc(); /* The arg_pack is freed() in the rsh_host_submit_job__() function.
freeing it here is dangerous, because we might free it before the
thread-called function is finished with it. */
job = rsh_job_alloc(run_path);
arg_pack_append_ptr(arg_pack , driver->rsh_command);
arg_pack_append_ptr(arg_pack , host);
arg_pack_append_ptr(arg_pack , (char *) submit_cmd);
arg_pack_append_int(arg_pack , num_cpu );
arg_pack_append_int(arg_pack , argc );
arg_pack_append_ptr(arg_pack , argv );
arg_pack_append_ptr(arg_pack , job);
{
int pthread_return_value = pthread_create( &job->run_thread , &driver->thread_attr , rsh_host_submit_job__ , arg_pack);
if (pthread_return_value != 0)
util_abort("%s failed to create thread ERROR:%d \n", __func__ , pthread_return_value);
}
job->status = JOB_QUEUE_RUNNING;
job->active = true;
}
}
pthread_mutex_unlock( &driver->submit_lock );
}
return job;
}
void rsh_driver_clear_host_list( rsh_driver_type * driver ) {
int ihost;
for (ihost =0; ihost < driver->num_hosts; ihost++)
rsh_host_free(driver->host_list[ihost]);
free(driver->host_list);
driver->num_hosts = 0;
driver->host_list = NULL;
driver->last_host_index = 0;
}
void rsh_driver_free(rsh_driver_type * driver) {
rsh_driver_clear_host_list( driver );
pthread_attr_destroy ( &driver->thread_attr );
free(driver->rsh_command );
hash_free( driver->__host_hash );
free(driver);
driver = NULL;
}
void rsh_driver_free__(void * __driver) {
rsh_driver_type * driver = rsh_driver_safe_cast( __driver );
rsh_driver_free( driver );
}
void rsh_driver_set_host_list( rsh_driver_type * rsh_driver , const hash_type * rsh_host_list) {
rsh_driver_clear_host_list( rsh_driver );
if (rsh_host_list != NULL) {
hash_iter_type * hash_iter = hash_iter_alloc( rsh_host_list );
while (!hash_iter_is_complete( hash_iter )) {
const char * host = hash_iter_get_next_key( hash_iter );
int max_running = hash_get_int( rsh_host_list , host );
rsh_driver_add_host(rsh_driver , host , max_running);
}
if (rsh_driver->num_hosts == 0)
util_abort("%s: failed to add any valid RSH hosts - aborting.\n",__func__);
}
}
/**
   Allocate an rsh driver instance with an empty host list; hosts are
   added subsequently with rsh_driver_add_host().
*/
void * rsh_driver_alloc( ) {
rsh_driver_type * rsh_driver = (rsh_driver_type*)util_malloc( sizeof * rsh_driver );
UTIL_TYPE_ID_INIT( rsh_driver , RSH_DRIVER_TYPE_ID );
pthread_mutex_init( &rsh_driver->submit_lock , NULL );
pthread_attr_init( &rsh_driver->thread_attr );
pthread_attr_setdetachstate( &rsh_driver->thread_attr , PTHREAD_CREATE_DETACHED );
/**
To simplify the Python wrapper it is possible to pass in NULL as
rsh_host_list pointer, and then subsequently add hosts with
rsh_driver_add_host().
*/
rsh_driver->num_hosts = 0;
rsh_driver->host_list = NULL;
rsh_driver->last_host_index = 0;
rsh_driver->rsh_command = NULL;
rsh_driver->__host_hash = hash_alloc();
return rsh_driver;
}
void rsh_driver_add_host(rsh_driver_type * rsh_driver , const char * hostname , int host_max_running) {
rsh_host_type * new_host = rsh_host_alloc(hostname , host_max_running); /* Could in principle update an existing entry if the host name is already in the list. */
if (new_host != NULL) {
rsh_driver->num_hosts++;
rsh_driver->host_list = (rsh_host_type**)util_realloc(rsh_driver->host_list , rsh_driver->num_hosts * sizeof * rsh_driver->host_list );
rsh_driver->host_list[(rsh_driver->num_hosts - 1)] = new_host;
}
}
/**
Hostname should be a string as host:max_running, the ":max_running"
part is optional, and will default to 1.
*/
void rsh_driver_add_host_from_string(rsh_driver_type * rsh_driver , const char * hostname) {
int host_max_running = 1; /* Default when no ":max_running" suffix is given. */
char ** tmp;
char * host;
int tokens;
util_split_string( hostname , ":" , &tokens , &tmp);
if (tokens > 1) {
if (!util_sscanf_int( tmp[tokens - 1] , &host_max_running))
util_abort("%s: failed to parse out integer from: %s \n",__func__ , hostname);
host = util_alloc_joined_string((const char **) tmp , tokens - 1 , ":");
} else
host = util_alloc_string_copy( tmp[0] );
rsh_driver_add_host( rsh_driver , host , host_max_running );
util_free_stringlist( tmp , tokens );
free( host );
}
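/* Illustrative sketch (editor's example, not part of the original driver):
   assuming a driver handle obtained from rsh_driver_alloc(), the string form
   documented above parses as

       rsh_driver_add_host_from_string( driver , "compute-node:4" );   => host "compute-node", max_running = 4
       rsh_driver_add_host_from_string( driver , "compute-node" );     => host "compute-node", max_running = 1 (default)

   where "compute-node" is a hypothetical host name used only for this example. */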
bool rsh_driver_set_option( void * __driver , const char * option_key , const void * value_ ) {
const char * value = (const char*) value_;
rsh_driver_type * driver = rsh_driver_safe_cast( __driver );
bool has_option = true;
{
if (strcmp(RSH_HOST , option_key) == 0) /* Add one host - value should be hostname:max */
rsh_driver_add_host_from_string( driver , value );
else if (strcmp(RSH_HOSTLIST , option_key) == 0) { /* Set full host list - value should be hash of integers. */
if (value != NULL) {
const hash_type * hash_value = hash_safe_cast_const( value );
rsh_driver_set_host_list( driver , hash_value );
}
} else if (strcmp( RSH_CLEAR_HOSTLIST , option_key) == 0)
/* Value is not considered - this is an action, and not a _set operation. */
rsh_driver_set_host_list( driver , NULL );
else if (strcmp( RSH_CMD , option_key) == 0)
driver->rsh_command = util_realloc_string_copy( driver->rsh_command , value );
else
has_option = false;<|fim▁hole|>}
const void * rsh_driver_get_option( const void * __driver , const char * option_key ) {
const rsh_driver_type * driver = rsh_driver_safe_cast_const( __driver );
{
if (strcmp( RSH_CMD , option_key ) == 0)
return driver->rsh_command;
else if (strcmp( RSH_HOSTLIST , option_key) == 0) {
int ihost;
hash_clear( driver->__host_hash );
for (ihost = 0; ihost < driver->num_hosts; ihost++) {
rsh_host_type * host = driver->host_list[ ihost ];
hash_insert_int( driver->__host_hash , host->host_name , host->max_running);
}
return driver->__host_hash;
} else {
util_abort("%s: get not implemented fro option_id:%s for rsh \n",__func__ , option_key );
return NULL;
}
}
}
void rsh_driver_init_option_list(stringlist_type * option_list) {
stringlist_append_copy(option_list, RSH_HOST);
stringlist_append_copy(option_list, RSH_HOSTLIST);
stringlist_append_copy(option_list, RSH_CMD);
stringlist_append_copy(option_list, RSH_CLEAR_HOSTLIST);
}
#undef RSH_JOB_TYPE_ID
/*****************************************************************/<|fim▁end|>
|
}
return has_option;
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>module.exports = {
Device: Device
}<|fim▁end|>
|
'use strict'
// import Device from './Device'
let Device = require('./Device').Device
|
<|file_name|>SuperUglyNumber.java<|end_file_name|><|fim▁begin|>package problems;
import java.util.Arrays;
import java.util.PriorityQueue;
/**
* Leetcode: Super Ugly Number
* Created by alan on 2/24/2016.
*/
public class SuperUglyNumber {
class Node implements Comparable<Node> {
int val;
final int prime_index;
public Node(int value, int prime_idx) {
this.val = value;
this.prime_index = prime_idx;
}
public int compareTo(Node a) {
return this.val - a.val;
}
}
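    // Sketch of the idea behind the heap-based solution (comment added for clarity,
    // not part of the original submission): keep one Node per prime; each Node's
    // value is primes[k] * nums[index[k]], i.e. the next candidate that prime can
    // produce. Repeatedly popping the smallest candidate and advancing its index
    // yields the super ugly numbers in increasing order without duplicates.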
public int[] nthSuperUglyNumber(int n, int[] primes) {
int[] nums = new int[n];
nums[0] = 1;
int[] index = new int[primes.length];<|fim▁hole|> for (int i = 0; i < primes.length; i++)
pq.add(new Node(primes[i], i));
for (int i = 1; i < n; i++) {
Node node = pq.poll();
while (node.val == nums[i - 1]) {
node.val = nums[++index[node.prime_index]] * primes[node.prime_index];
pq.add(node);
node = pq.poll();
}
nums[i] = node.val;
node.val = nums[++index[node.prime_index]] * primes[node.prime_index];
pq.add(node);
}
return nums;
}
public static void main(String[] args) {
SuperUglyNumber sn = new SuperUglyNumber();
int[] primes = {2, 7, 13, 19};
System.out.println(Arrays.toString(primes));
System.out.println(Arrays.toString(sn.nthSuperUglyNumber(12, primes)));
}
}<|fim▁end|>
|
PriorityQueue<Node> pq = new PriorityQueue<>();
|
<|file_name|>test_template.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#pylint: disable=missing-docstring
####################################################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
####################################################################################################
import unittest
import bs4<|fim▁hole|>
import MooseDocs
from MooseDocs.common import moose_docs_file_tree
from MooseDocs.testing import MarkdownTestCase
class TestTemplate(MarkdownTestCase):
EXTENSIONS = ['MooseDocs.extensions.template', 'MooseDocs.extensions.app_syntax', 'meta']
@classmethod
def updateExtensions(cls, configs):
"""
Method to change the arguments that come from the configuration file for
specific tests. This way one can test optional arguments without permanently
changing the configuration file.
"""
configs['MooseDocs.extensions.template']['template'] = 'testing.html'
configs['MooseDocs.extensions.app_syntax']['hide']['framework'].append('/Functions')
configs['MooseDocs.extensions.app_syntax']['hide']['phase_field'].append('/ICs')
@classmethod
def setUpClass(cls):
super(TestTemplate, cls).setUpClass()
# Use BoxMarker.md to test Doxygen and Code lookups
config = dict(base='docs/content',
include=['docs/content/documentation/systems/Adaptivity/Markers/*'])
root = moose_docs_file_tree({'framework': config})
node = root.findall('/BoxMarker')[0]
cls.html = cls.parser.convert(node)
#with open(node.markdown(), 'r') as fid:
# cls.html = fid.read()
cls.soup = bs4.BeautifulSoup(cls.html, "html.parser")
def testContent(self):
self.assertIsNotNone(self.soup.find('h1'))
self.assertIn('BoxMarker', self.html)
def testDoxygen(self):
a = str(self.soup)
self.assertIsNotNone(a)
self.assertIn('classBoxMarker.html', a)
self.assertIn('Doxygen', a)
def testCode(self):
html = str(self.soup)
self.assertIn('href="https://github.com/idaholab/moose/blob/master/framework/include/'\
'markers/BoxMarker.h"', html)
self.assertIn('href="https://github.com/idaholab/moose/blob/master/framework/src/'\
'markers/BoxMarker.C"', html)
def testHidden(self):
md = '!syntax objects /Functions'
html = self.convert(md)
gold = '<a class="moose-bad-link" data-moose-disable-link-error="1" ' \
'href="/Functions/framework/ParsedVectorFunction.md">ParsedVectorFunction</a>'
self.assertIn(gold.format(MooseDocs.MOOSE_DIR.rstrip('/')), html)
def testPolycrystalICs(self):
md = '[Foo](/ICs/PolycrystalICs/index.md)'
html = self.convert(md)
gold = '<a class="moose-bad-link" href="/ICs/PolycrystalICs/index.md">'
self.assertIn(gold, html)
if __name__ == '__main__':
unittest.main(verbosity=2)<|fim▁end|>
| |
<|file_name|>gunicorn_config.py<|end_file_name|><|fim▁begin|># gunicorn configuration
bind = '0.0.0.0:8000'
workers = 3
# These log settings assume that gunicorn log config will be included in the django base.py logging configuration
accesslog = '-'<|fim▁hole|>access_log_format = '{"request": "%(r)s", "http_status_code": "%(s)s", "http_request_url": "%(U)s", "http_query_string": "%(q)s", "http_verb": "%(m)s", "http_version": "%(H)s", "http_referer": "%(f)s", "x_forwarded_for": "%({x-forwarded-for}i)s", "remote_address": "%(h)s", "request_usec": "%(D)s", "request_sec": "%(L)s"}'<|fim▁end|>
|
errorlog = '-'
|
<|file_name|>trait-implementations.rs<|end_file_name|><|fim▁begin|>// compile-flags:-Zprint-mono-items=eager
#![deny(dead_code)]
#![feature(start)]
pub trait SomeTrait {
fn foo(&self);
fn bar<T>(&self, x: T);
}
impl SomeTrait for i64 {
//~ MONO_ITEM fn <i64 as SomeTrait>::foo
fn foo(&self) {}
fn bar<T>(&self, _: T) {}<|fim▁hole|>
impl SomeTrait for i32 {
//~ MONO_ITEM fn <i32 as SomeTrait>::foo
fn foo(&self) {}
fn bar<T>(&self, _: T) {}
}
pub trait SomeGenericTrait<T> {
fn foo(&self, x: T);
fn bar<T2>(&self, x: T, y: T2);
}
// Concrete impl of generic trait
impl SomeGenericTrait<u32> for f64 {
//~ MONO_ITEM fn <f64 as SomeGenericTrait<u32>>::foo
fn foo(&self, _: u32) {}
fn bar<T2>(&self, _: u32, _: T2) {}
}
// Generic impl of generic trait
impl<T> SomeGenericTrait<T> for f32 {
fn foo(&self, _: T) {}
fn bar<T2>(&self, _: T, _: T2) {}
}
//~ MONO_ITEM fn start
#[start]
fn start(_: isize, _: *const *const u8) -> isize {
//~ MONO_ITEM fn <i32 as SomeTrait>::bar::<char>
0i32.bar('x');
//~ MONO_ITEM fn <f64 as SomeGenericTrait<u32>>::bar::<&str>
0f64.bar(0u32, "&str");
//~ MONO_ITEM fn <f64 as SomeGenericTrait<u32>>::bar::<()>
0f64.bar(0u32, ());
//~ MONO_ITEM fn <f32 as SomeGenericTrait<char>>::foo
0f32.foo('x');
//~ MONO_ITEM fn <f32 as SomeGenericTrait<i64>>::foo
0f32.foo(-1i64);
//~ MONO_ITEM fn <f32 as SomeGenericTrait<u32>>::bar::<()>
0f32.bar(0u32, ());
//~ MONO_ITEM fn <f32 as SomeGenericTrait<&str>>::bar::<&str>
0f32.bar("&str", "&str");
0
}<|fim▁end|>
|
}
|
<|file_name|>KURLCFNet.cpp<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2004, 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "KURL.h"
#include <wtf/RetainPtr.h>
#include <CoreFoundation/CFURL.h>
using namespace std;
namespace WebCore {
typedef Vector<char, 512> CharBuffer;
CFURLRef createCFURLFromBuffer(const CharBuffer&);
KURL::KURL(CFURLRef url)
{
if (!url) {
invalidate();
return;
}
CFIndex bytesLength = CFURLGetBytes(url, 0, 0);
Vector<char, 512> buffer(bytesLength + 1);
char* bytes = &buffer[0];
CFURLGetBytes(url, reinterpret_cast<UInt8*>(bytes), bytesLength);
bytes[bytesLength] = '\0';
#if !USE(WTFURL)
parse(bytes);
#else
// FIXME: Add WTFURL Implementation.
UNUSED_PARAM(url);
invalidate();
#endif // USE(WTFURL)
}
CFURLRef createCFURLFromBuffer(const CharBuffer& buffer)
{
// NOTE: We use UTF-8 here since this encoding is used when computing strings when returning URL components
// (e.g calls to NSURL -path). However, this function is not tolerant of illegal UTF-8 sequences, which
// could either be a malformed string or bytes in a different encoding, like Shift-JIS, so we fall back
// onto using ISO Latin-1 in those cases.
CFURLRef result = CFURLCreateAbsoluteURLWithBytes(0, reinterpret_cast<const UInt8*>(buffer.data()), buffer.size(), kCFStringEncodingUTF8, 0, true);
if (!result)
result = CFURLCreateAbsoluteURLWithBytes(0, reinterpret_cast<const UInt8*>(buffer.data()), buffer.size(), kCFStringEncodingISOLatin1, 0, true);
return result;
}
#if !PLATFORM(MAC) && !(PLATFORM(QT) && USE(QTKIT))
CFURLRef KURL::createCFURL() const
{
#if !USE(WTFURL)
// FIXME: What should this return for invalid URLs?
// Currently it throws away the high bytes of the characters in the string in that case,
// which is clearly wrong.
CharBuffer buffer;
copyToBuffer(buffer);
return createCFURLFromBuffer(buffer);
#else // USE(WTFURL)
// FIXME: Add WTFURL Implementation.
return 0;
#endif
}
#endif
#if !USE(WTFURL) && !(PLATFORM(QT) && USE(QTKIT))
String KURL::fileSystemPath() const
{
RetainPtr<CFURLRef> cfURL(AdoptCF, createCFURL());
if (!cfURL)
return String();
#if PLATFORM(WIN)
CFURLPathStyle pathStyle = kCFURLWindowsPathStyle;
#else
CFURLPathStyle pathStyle = kCFURLPOSIXPathStyle;
#endif
return RetainPtr<CFStringRef>(AdoptCF, CFURLCopyFileSystemPath(cfURL.get(), pathStyle)).get();
}
#endif<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>test_param_methods.py<|end_file_name|><|fim▁begin|>"""
Testing for enumerate_param, enumerate_params, and enumerate_keyed_param
"""
import unittest
import mws
# pylint: disable=invalid-name
class TestParamsRaiseExceptions(unittest.TestCase):
"""
Simple test that asserts a ValueError is raised by an improper entry to
`utils.enumerate_keyed_param`.
"""
def test_keyed_param_fails_without_dict(self):
"""
Should raise ValueError for values not being a dict.<|fim▁hole|> mws.utils.enumerate_keyed_param(param, values)
def test_single_param_default():
"""
Test each method type for their default empty dicts.
"""
# Single
assert mws.utils.enumerate_param("something", []) == {}
# Multi
assert mws.utils.enumerate_params() == {}
assert mws.utils.enumerate_params("antler") == {}
# Keyed
assert mws.utils.enumerate_keyed_param("acorn", []) == {}
def test_single_param_not_dotted_list_values():
"""
A param string with no dot at the end and a list of ints.
List should be ingested in order.
"""
param = "SomethingOrOther"
values = (123, 765, 3512, 756437, 3125)
result = mws.utils.enumerate_param(param, values)
assert result == {
"SomethingOrOther.1": 123,
"SomethingOrOther.2": 765,
"SomethingOrOther.3": 3512,
"SomethingOrOther.4": 756437,
"SomethingOrOther.5": 3125,
}
def test_single_param_dotted_single_value():
"""
A param string with a dot at the end and a single string value.
Values that are not list, tuple, or set should coerce to a list and provide a single output.
"""
param = "FooBar."
values = "eleven"
result = mws.utils.enumerate_param(param, values)
assert result == {
"FooBar.1": "eleven",
}
def test_multi_params():
"""
A series of params sent as a list of dicts to enumerate_params.
Each param should generate a unique set of keys and values.
Final result should be a flat dict.
"""
param1 = "Summat."
values1 = ("colorful", "cheery", "turkey")
param2 = "FooBaz.what"
values2 = "singular"
param3 = "hot_dog"
values3 = ["something", "or", "other"]
# We could test with values as a set, but we cannot be 100% sure of the order of the output,
# and I don't feel it necessary to flesh this out enough to account for it.
result = mws.utils.enumerate_params({
param1: values1,
param2: values2,
param3: values3,
})
assert result == {
"Summat.1": "colorful",
"Summat.2": "cheery",
"Summat.3": "turkey",
"FooBaz.what.1": "singular",
"hot_dog.1": "something",
"hot_dog.2": "or",
"hot_dog.3": "other",
}
def test_keyed_params():
"""
Asserting the result through enumerate_keyed_param is as expected.
"""
# Example:
# param = "InboundShipmentPlanRequestItems.member"
# values = [
# {'SellerSKU': 'Football2415',
# 'Quantity': 3},
# {'SellerSKU': 'TeeballBall3251',
# 'Quantity': 5},
# ...
# ]
# Returns:
# {
# 'InboundShipmentPlanRequestItems.member.1.SellerSKU': 'Football2415',
# 'InboundShipmentPlanRequestItems.member.1.Quantity': 3,
# 'InboundShipmentPlanRequestItems.member.2.SellerSKU': 'TeeballBall3251',
# 'InboundShipmentPlanRequestItems.member.2.Quantity': 5,
# ...
# }
param = "AthingToKeyUp.member"
item1 = {
"thing": "stuff",
"foo": "baz",
}
item2 = {
"thing": 123,
"foo": 908,
"bar": "hello",
}
item3 = {
"stuff": "foobarbazmatazz",
"stuff2": "foobarbazmatazz5",
}
result = mws.utils.enumerate_keyed_param(param, [item1, item2, item3])
assert result == {
"AthingToKeyUp.member.1.thing": "stuff",
"AthingToKeyUp.member.1.foo": "baz",
"AthingToKeyUp.member.2.thing": 123,
"AthingToKeyUp.member.2.foo": 908,
"AthingToKeyUp.member.2.bar": "hello",
"AthingToKeyUp.member.3.stuff": "foobarbazmatazz",
"AthingToKeyUp.member.3.stuff2": "foobarbazmatazz5",
}<|fim▁end|>
|
"""
param = "something"
values = ["this is not a dict like it should be!"]
with self.assertRaises(ValueError):
|
<|file_name|>node_table.rs<|end_file_name|><|fim▁begin|>// Copyright 2015, 2016 Parity Technologies (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
use std::mem;
use std::slice::from_raw_parts;
use std::net::{SocketAddr, ToSocketAddrs, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr};
use std::hash::{Hash, Hasher};
use std::str::{FromStr};
use std::collections::{HashMap, HashSet};
use std::fmt::{Display, Formatter};
use std::path::{PathBuf};
use std::fmt;
use std::fs;
use std::io::{Read, Write};
use util::hash::*;
use util::UtilError;
use rlp::*;
use time::Tm;
use error::NetworkError;
use AllowIP;
use discovery::{TableUpdates, NodeEntry};
use ip_utils::*;
pub use rustc_serialize::json::Json;
/// Node public key
pub type NodeId = H512;
#[derive(Debug, Clone)]
/// Node address info
pub struct NodeEndpoint {
/// IP(V4 or V6) address
pub address: SocketAddr,
/// Connection port.
pub udp_port: u16
}
impl NodeEndpoint {
pub fn udp_address(&self) -> SocketAddr {
match self.address {
SocketAddr::V4(a) => SocketAddr::V4(SocketAddrV4::new(a.ip().clone(), self.udp_port)),
SocketAddr::V6(a) => SocketAddr::V6(SocketAddrV6::new(a.ip().clone(), self.udp_port, a.flowinfo(), a.scope_id())),
}
}
pub fn is_allowed(&self, filter: AllowIP) -> bool {
match filter {
AllowIP::All => true,
AllowIP::Private => !self.address.ip().is_global_s(),
AllowIP::Public => self.address.ip().is_global_s(),
}
}
pub fn from_rlp(rlp: &UntrustedRlp) -> Result<Self, DecoderError> {
let tcp_port = try!(rlp.val_at::<u16>(2));
let udp_port = try!(rlp.val_at::<u16>(1));
let addr_bytes = try!(try!(rlp.at(0)).data());
let address = try!(match addr_bytes.len() {
4 => Ok(SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(addr_bytes[0], addr_bytes[1], addr_bytes[2], addr_bytes[3]), tcp_port))),
16 => unsafe {
let o: *const u16 = mem::transmute(addr_bytes.as_ptr());
let o = from_raw_parts(o, 8);
Ok(SocketAddr::V6(SocketAddrV6::new(Ipv6Addr::new(o[0], o[1], o[2], o[3], o[4], o[5], o[6], o[7]), tcp_port, 0, 0)))
},
_ => Err(DecoderError::RlpInconsistentLengthAndData)
});
Ok(NodeEndpoint { address: address, udp_port: udp_port })
}
pub fn to_rlp(&self, rlp: &mut RlpStream) {
match self.address {
SocketAddr::V4(a) => {
rlp.append(&(&a.ip().octets()[..]));
}
SocketAddr::V6(a) => unsafe {
let o: *const u8 = mem::transmute(a.ip().segments().as_ptr());
rlp.append(&from_raw_parts(o, 16));
}
};
rlp.append(&self.udp_port);
rlp.append(&self.address.port());
}
pub fn to_rlp_list(&self, rlp: &mut RlpStream) {
rlp.begin_list(3);
self.to_rlp(rlp);
}
pub fn is_valid(&self) -> bool {
self.udp_port != 0 && self.address.port() != 0 &&
match self.address {
SocketAddr::V4(a) => !a.ip().is_unspecified_s(),
SocketAddr::V6(a) => !a.ip().is_unspecified_s()
}
}
}
impl FromStr for NodeEndpoint {
type Err = NetworkError;
/// Create endpoint from string. Performs name resolution if given a host name.
fn from_str(s: &str) -> Result<NodeEndpoint, NetworkError> {
let address = s.to_socket_addrs().map(|mut i| i.next());
match address {
Ok(Some(a)) => Ok(NodeEndpoint {
address: a,
udp_port: a.port()
}),
Ok(_) => Err(NetworkError::AddressResolve(None)),
Err(e) => Err(NetworkError::AddressResolve(Some(e)))
}
}
}
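// Illustrative usage (editor's sketch, mirroring the `endpoint_parse` test below):
//
//     let endpoint: NodeEndpoint = "123.99.55.44:7770".parse().unwrap();
//     assert_eq!(endpoint.udp_port, 7770);
//
// A host name such as "example.org:30303" would also be accepted, since
// `from_str` resolves names via `to_socket_addrs`.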
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum PeerType {
_Required,
Optional
}
pub struct Node {
pub id: NodeId,
pub endpoint: NodeEndpoint,
pub peer_type: PeerType,
pub failures: u32,
pub last_attempted: Option<Tm>,
}
impl Node {
pub fn new(id: NodeId, endpoint: NodeEndpoint) -> Node {
Node {
id: id,
endpoint: endpoint,
peer_type: PeerType::Optional,
failures: 0,
last_attempted: None,
}
}
}
impl Display for Node {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
if self.endpoint.udp_port != self.endpoint.address.port() {
try!(write!(f, "enode://{}@{}+{}", self.id.hex(), self.endpoint.address, self.endpoint.udp_port));
} else {
try!(write!(f, "enode://{}@{}", self.id.hex(), self.endpoint.address));
}
Ok(())
}
}
impl FromStr for Node {
type Err = NetworkError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (id, endpoint) = if s.len() > 136 && &s[0..8] == "enode://" && &s[136..137] == "@" {
(try!(s[8..136].parse().map_err(UtilError::from)), try!(NodeEndpoint::from_str(&s[137..])))
}
else {
(NodeId::new(), try!(NodeEndpoint::from_str(s)))
};
Ok(Node {
id: id,
endpoint: endpoint,
peer_type: PeerType::Optional,
last_attempted: None,
failures: 0,
})
}
}
impl PartialEq for Node {
fn eq(&self, other: &Self) -> bool {
self.id == other.id
}
}
impl Eq for Node {}
impl Hash for Node {
fn hash<H>(&self, state: &mut H) where H: Hasher {
self.id.hash(state)
}
}
/// Node table backed by disk file.
pub struct NodeTable {
nodes: HashMap<NodeId, Node>,
useless_nodes: HashSet<NodeId>,
path: Option<String>,
}
impl NodeTable {
pub fn new(path: Option<String>) -> NodeTable {
NodeTable {
path: path.clone(),
nodes: NodeTable::load(path),
useless_nodes: HashSet::new(),
}
}
/// Add a node to table
pub fn add_node(&mut self, mut node: Node) {
// preserve failure counter
let failures = self.nodes.get(&node.id).map_or(0, |n| n.failures);
node.failures = failures;
self.nodes.insert(node.id.clone(), node);
}
/// Returns node ids sorted by number of failures
pub fn nodes(&self, filter: AllowIP) -> Vec<NodeId> {
let mut refs: Vec<&Node> = self.nodes.values().filter(|n| !self.useless_nodes.contains(&n.id) && n.endpoint.is_allowed(filter)).collect();
refs.sort_by(|a, b| a.failures.cmp(&b.failures));
refs.iter().map(|n| n.id.clone()).collect()
}
/// Unordered list of all entries
pub fn unordered_entries(&self) -> Vec<NodeEntry> {
self.nodes.values().map(|n| NodeEntry { endpoint: n.endpoint.clone(), id: n.id.clone() }).collect()
}
/// Get particular node
pub fn get_mut(&mut self, id: &NodeId) -> Option<&mut Node> {
self.nodes.get_mut(id)
}
/// Apply table changes coming from discovery
pub fn update(&mut self, mut update: TableUpdates, reserved: &HashSet<NodeId>) {
for (_, node) in update.added.drain() {
let mut entry = self.nodes.entry(node.id.clone()).or_insert_with(|| Node::new(node.id.clone(), node.endpoint.clone()));
entry.endpoint = node.endpoint;
}
for r in update.removed {
if !reserved.contains(&r) {
self.nodes.remove(&r);
}
}
}
/// Increase failure counter for a node
pub fn note_failure(&mut self, id: &NodeId) {
if let Some(node) = self.nodes.get_mut(id) {
node.failures += 1;
}
}
/// Mark as useless, no further attempts to connect until next call to `clear_useless`.
pub fn mark_as_useless(&mut self, id: &NodeId) {
self.useless_nodes.insert(id.clone());
}
/// Attempt to connect to useless nodes again.
pub fn clear_useless(&mut self) {
self.useless_nodes.clear();
}
/// Save the nodes.json file.
pub fn save(&self) {
if let Some(ref path) = self.path {
let mut path_buf = PathBuf::from(path);
if let Err(e) = fs::create_dir_all(path_buf.as_path()) {
warn!("Error creating node table directory: {:?}", e);
return;
};
path_buf.push("nodes.json");
let mut json = String::new();
json.push_str("{\n");
json.push_str("\"nodes\": [\n");
let node_ids = self.nodes(AllowIP::All);
for i in 0 .. node_ids.len() {
let node = self.nodes.get(&node_ids[i]).expect("self.nodes() only returns node IDs from self.nodes");
json.push_str(&format!("\t{{ \"url\": \"{}\", \"failures\": {} }}{}\n", node, node.failures, if i == node_ids.len() - 1 {""} else {","}))
}
json.push_str("]\n");
json.push_str("}");
let mut file = match fs::File::create(path_buf.as_path()) {
Ok(file) => file,
Err(e) => {
warn!("Error creating node table file: {:?}", e);
return;
}
};<|fim▁hole|> warn!("Error writing node table file: {:?}", e);
}
}
}
fn load(path: Option<String>) -> HashMap<NodeId, Node> {
let mut nodes: HashMap<NodeId, Node> = HashMap::new();
if let Some(path) = path {
let mut path_buf = PathBuf::from(path);
path_buf.push("nodes.json");
let mut file = match fs::File::open(path_buf.as_path()) {
Ok(file) => file,
Err(e) => {
debug!("Error opening node table file: {:?}", e);
return nodes;
}
};
let mut buf = String::new();
match file.read_to_string(&mut buf) {
Ok(_) => {},
Err(e) => {
warn!("Error reading node table file: {:?}", e);
return nodes;
}
}
let json = match Json::from_str(&buf) {
Ok(json) => json,
Err(e) => {
warn!("Error parsing node table file: {:?}", e);
return nodes;
}
};
if let Some(list) = json.as_object().and_then(|o| o.get("nodes")).and_then(|n| n.as_array()) {
for n in list.iter().filter_map(|n| n.as_object()) {
if let Some(url) = n.get("url").and_then(|u| u.as_string()) {
if let Ok(mut node) = Node::from_str(url) {
if let Some(failures) = n.get("failures").and_then(|f| f.as_u64()) {
node.failures = failures as u32;
}
nodes.insert(node.id.clone(), node);
}
}
}
}
}
nodes
}
}
impl Drop for NodeTable {
fn drop(&mut self) {
self.save();
}
}
/// Check if node url is valid
pub fn is_valid_node_url(url: &str) -> bool {
use std::str::FromStr;
Node::from_str(url).is_ok()
}
#[cfg(test)]
mod tests {
use super::*;
use std::net::{SocketAddr, SocketAddrV4, Ipv4Addr};
use util::H512;
use std::str::FromStr;
use devtools::*;
use AllowIP;
#[test]
fn endpoint_parse() {
let endpoint = NodeEndpoint::from_str("123.99.55.44:7770");
assert!(endpoint.is_ok());
let v4 = match endpoint.unwrap().address {
SocketAddr::V4(v4address) => v4address,
_ => panic!("should ve v4 address")
};
assert_eq!(SocketAddrV4::new(Ipv4Addr::new(123, 99, 55, 44), 7770), v4);
}
#[test]
fn node_parse() {
assert!(is_valid_node_url("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770"));
let node = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770");
assert!(node.is_ok());
let node = node.unwrap();
let v4 = match node.endpoint.address {
SocketAddr::V4(v4address) => v4address,
_ => panic!("should ve v4 address")
};
assert_eq!(SocketAddrV4::new(Ipv4Addr::new(22, 99, 55, 44), 7770), v4);
assert_eq!(
H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap(),
node.id);
}
#[test]
fn table_failure_order() {
let node1 = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
let node2 = Node::from_str("enode://b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
let node3 = Node::from_str("enode://c979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
let id1 = H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let id2 = H512::from_str("b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let id3 = H512::from_str("c979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let mut table = NodeTable::new(None);
table.add_node(node3);
table.add_node(node1);
table.add_node(node2);
table.note_failure(&id1);
table.note_failure(&id1);
table.note_failure(&id2);
let r = table.nodes(AllowIP::All);
assert_eq!(r[0][..], id3[..]);
assert_eq!(r[1][..], id2[..]);
assert_eq!(r[2][..], id1[..]);
}
#[test]
fn table_save_load() {
let temp_path = RandomTempPath::create_dir();
let node1 = Node::from_str("enode://a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
let node2 = Node::from_str("enode://b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c@22.99.55.44:7770").unwrap();
let id1 = H512::from_str("a979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
let id2 = H512::from_str("b979fb575495b8d6db44f750317d0f4622bf4c2aa3365d6af7c284339968eef29b69ad0dce72a4d8db5ebb4968de0e3bec910127f134779fbcb0cb6d3331163c").unwrap();
{
let mut table = NodeTable::new(Some(temp_path.as_path().to_str().unwrap().to_owned()));
table.add_node(node1);
table.add_node(node2);
table.note_failure(&id2);
}
{
let table = NodeTable::new(Some(temp_path.as_path().to_str().unwrap().to_owned()));
let r = table.nodes(AllowIP::All);
assert_eq!(r[0][..], id1[..]);
assert_eq!(r[1][..], id2[..]);
}
}
}<|fim▁end|>
|
if let Err(e) = file.write(&json.into_bytes()) {
|
<|file_name|>user_event_log_proxy.cc<|end_file_name|><|fim▁begin|>/* Copyright 2008 (C) Nicira, Inc.
*
* This file is part of NOX.
*
* NOX is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* NOX is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with NOX. If not, see <http://www.gnu.org/licenses/>.
*/
/*
*/
#include "user_event_log_proxy.hh"
#include "threads/cooperative.hh"
#include "pyrt/pycontext.hh"
#include "swigpyrun.h"
#include "vlog.hh"
#include "pyrt/pyglue.hh"
using namespace std;
using namespace vigil;
using namespace vigil::applications;
namespace {
Vlog_module lg("user_event_log_proxy");
}
namespace vigil {
namespace applications {
/*
* Get a pointer to the runtime context so we can resolve
* user_event_log at configure time.
*/
user_event_log_proxy::user_event_log_proxy(PyObject* ctxt) : uel(0)
{
if (!SWIG_Python_GetSwigThis(ctxt) || !SWIG_Python_GetSwigThis(ctxt)->ptr) {
throw runtime_error("Unable to access Python context.");
}
c = ((PyContext*)SWIG_Python_GetSwigThis(ctxt)->ptr)->c;
}
void user_event_log_proxy::log_simple(const string &app_name, int level,
const string &msg){
uel->log_simple(app_name, (LogEntry::Level) level, msg);
}
void user_event_log_proxy::log(const LogEntry &entry){
uel->log(entry);
}
int user_event_log_proxy::get_max_logid() {
return uel->get_max_logid();
}
int user_event_log_proxy::get_min_logid() {
return uel->get_min_logid();
}
void user_event_log_proxy::set_max_num_entries(int num) {
uel->set_max_num_entries(num);
}
void user_event_log_proxy::python_callback(PyObject *args,
boost::intrusive_ptr<PyObject> cb) {
Co_critical_section c;
PyObject* ret = PyObject_CallObject(cb.get(), args);
if (ret == 0) {
const string exc = pretty_print_python_exception();
lg.err("Python callback invocation failed:\n%s", exc.c_str());
}
Py_DECREF(args);
Py_XDECREF(ret);
}
PyObject *user_event_log_proxy::get_log_entry(int logid, PyObject *cb){
try {
if (!cb || !PyCallable_Check(cb)) { throw "Invalid callback"; }
boost::intrusive_ptr<PyObject> cptr(cb, true);
Log_entry_callback f = boost::bind(
&user_event_log_proxy::get_log_callback,this,_1,_2,_3,_4,_5,_6,_7,cptr);
uel->get_log_entry((int64_t)logid, f);
Py_RETURN_NONE;
}
catch (const char* msg) {
/* Unable to convert the arguments. */
PyErr_SetString(PyExc_TypeError, msg);
return 0;
}
}
void user_event_log_proxy::get_log_callback(int64_t logid, int64_t ts,
const string &app,
int level, const string &msg, const PrincipalList &src_names,
const PrincipalList &dst_names, boost::intrusive_ptr<PyObject> cb) {
Co_critical_section c;
PyObject* args = PyTuple_New(7);
PyTuple_SetItem(args, 0, PyInt_FromLong(logid));
PyTuple_SetItem(args, 1, PyLong_FromLong(ts));
PyTuple_SetItem(args, 2, PyString_FromString(app.c_str()));
PyTuple_SetItem(args, 3, PyInt_FromLong(level));
PyTuple_SetItem(args, 4, PyString_FromString(msg.c_str()));
PyTuple_SetItem(args, 5, to_python_list(src_names));
PyTuple_SetItem(args, 6, to_python_list(dst_names));
python_callback(args,cb);
}
PyObject * user_event_log_proxy::get_logids_for_name(int64_t id,
int64_t type, PyObject* cb) {
try {
if (!cb || !PyCallable_Check(cb)) { throw "Invalid callback"; }
boost::intrusive_ptr<PyObject> cptr(cb, true);
Get_logids_callback f = boost::bind(
&user_event_log_proxy::get_logids_callback,this,_1,cptr);
uel->get_logids_for_name(id, (PrincipalType) type,f);
Py_RETURN_NONE;
}
catch (const char* msg) {
/* Unable to convert the arguments. */
PyErr_SetString(PyExc_TypeError, msg);
return 0;
}
}
void user_event_log_proxy::get_logids_callback(const list<int64_t> &logids,
boost::intrusive_ptr<PyObject> cb) {
Co_critical_section c;
PyObject* logid_tuple = PyTuple_New(logids.size());
list<int64_t>::const_iterator it = logids.begin();
int i = 0;
for( ; it != logids.end(); ++it) {
PyTuple_SetItem(logid_tuple, i, PyLong_FromLong(*it));
++i;
}
PyObject* args = PyTuple_New(1);
PyTuple_SetItem(args,0,logid_tuple);
python_callback(args,cb);
}
PyObject *user_event_log_proxy::clear(PyObject *cb){
try {
if (!cb || !PyCallable_Check(cb)) { throw "Invalid callback"; }
boost::intrusive_ptr<PyObject> cptr(cb, true);
Clear_log_callback f = boost::bind(
&user_event_log_proxy::clear_callback,this,_1, cptr);
uel->clear(f);
Py_RETURN_NONE;
}
catch (const char* msg) {
/* Unable to convert the arguments. */
PyErr_SetString(PyExc_TypeError, msg);
return 0;
}
}
PyObject *user_event_log_proxy::remove(int max_logid, PyObject *cb){
try {
if (!cb || !PyCallable_Check(cb)) { throw "Invalid callback"; }
boost::intrusive_ptr<PyObject> cptr(cb, true);
Clear_log_callback f = boost::bind(
&user_event_log_proxy::clear_callback,this,_1,cptr);
uel->remove(max_logid, f);
Py_RETURN_NONE;
}
catch (const char* msg) {
/* Unable to convert the arguments. */
PyErr_SetString(PyExc_TypeError, msg);
return 0;
}
}
void user_event_log_proxy::clear_callback(const storage::Result &r,
boost::intrusive_ptr<PyObject> cb){
PyObject* args = PyTuple_New(0);
python_callback(args,cb);
}
void
user_event_log_proxy::configure(PyObject* configuration)
{
c->resolve(uel);
}
void
user_event_log_proxy::install(PyObject*)
{
}
} // namespace applications<|fim▁hole|>} // namespace vigil<|fim▁end|>
| |
<|file_name|>commits.py<|end_file_name|><|fim▁begin|>#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from ..util.sampledata import package_csv
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
<|fim▁hole|>#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
def _read_data():
'''
'''
data = package_csv('commits', 'commits.txt.gz', parse_dates=True, header=None, names=['day', 'datetime'], index_col='datetime')
data = data.tz_localize('GMT').tz_convert('US/Central')
data['time'] = data.index.time
return data
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
data = _read_data()<|fim▁end|>
|
#-----------------------------------------------------------------------------
# Dev API
|
<|file_name|>app-main.js<|end_file_name|><|fim▁begin|>/**
*
* Online store PWA sample.<|fim▁hole|> * You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*
*/
import initApp from './app.js';
import {instance as router} from './router';
initApp();
router.loadCurrentRoute();<|fim▁end|>
|
* Copyright 2017 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
|
<|file_name|>cli.py<|end_file_name|><|fim▁begin|>import optparse
from os import curdir
from os.path import abspath
import sys
from autoscalebot.tasks import start_autoscaler
from autoscalebot import version
def main(args=sys.argv[1:]):
CLI_ROOT = abspath(curdir)
sys.path.insert(0, CLI_ROOT)
parser = optparse.OptionParser(
usage="%prog or type %prog -h (--help) for help",
version=version
)<|fim▁hole|> type="string",
help='settings to use when autoscaling')
options, args = parser.parse_args(args)
if options.settings:
settings = __import__(options.settings)
start_autoscaler(settings=settings)<|fim▁end|>
|
parser.add_option("--settings",
dest="settings",
default=None,
|
<|file_name|>htmlanchorelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::activation::Activatable;
use dom::attr::AttrValue;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::HTMLAnchorElementBinding;
use dom::bindings::codegen::Bindings::HTMLAnchorElementBinding::HTMLAnchorElementMethods;
use dom::bindings::codegen::Bindings::MouseEventBinding::MouseEventMethods;
use dom::bindings::codegen::Bindings::NodeBinding::NodeMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{JS, MutNullableHeap, Root};
use dom::bindings::str::USVString;
use dom::document::Document;
use dom::domtokenlist::DOMTokenList;
use dom::element::Element;
use dom::event::Event;
use dom::eventtarget::EventTarget;
use dom::htmlelement::HTMLElement;
use dom::htmlimageelement::HTMLImageElement;
use dom::mouseevent::MouseEvent;
use dom::node::{Node, document_from_node, window_from_node};
use dom::urlhelper::UrlHelper;
use dom::virtualmethods::VirtualMethods;
use num_traits::ToPrimitive;
use std::default::Default;
use string_cache::Atom;
use url::Url;
use util::str::DOMString;
#[dom_struct]
pub struct HTMLAnchorElement {
htmlelement: HTMLElement,
rel_list: MutNullableHeap<JS<DOMTokenList>>,
url: DOMRefCell<Option<Url>>,
}
impl HTMLAnchorElement {
fn new_inherited(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> HTMLAnchorElement {
HTMLAnchorElement {
htmlelement:
HTMLElement::new_inherited(localName, prefix, document),
rel_list: Default::default(),
url: DOMRefCell::new(None),
}
}
#[allow(unrooted_must_root)]
pub fn new(localName: Atom,
prefix: Option<DOMString>,
document: &Document) -> Root<HTMLAnchorElement> {
let element = HTMLAnchorElement::new_inherited(localName, prefix, document);
Node::reflect_node(box element, document, HTMLAnchorElementBinding::Wrap)
}
// https://html.spec.whatwg.org/multipage/#concept-hyperlink-url-set
fn set_url(&self) {
let attribute = self.upcast::<Element>().get_attribute(&ns!(), &atom!("href"));
*self.url.borrow_mut() = attribute.and_then(|attribute| {
let document = document_from_node(self);
document.url().join(&attribute.value()).ok()
});
}
// https://html.spec.whatwg.org/multipage/#reinitialise-url
fn reinitialize_url(&self) {
// Step 1.
match *self.url.borrow() {
None => return,
Some(ref url) if url.scheme() == "blob" && url.cannot_be_a_base() => return,
_ => (),
}
// Step 2.
self.set_url();
}
// https://html.spec.whatwg.org/multipage/#update-href
fn update_href(&self) {
self.upcast::<Element>().set_string_attribute(&atom!("href"),
self.url.borrow().as_ref().unwrap().as_str().into());
}
}
impl VirtualMethods for HTMLAnchorElement {
fn super_type(&self) -> Option<&VirtualMethods> {
Some(self.upcast::<HTMLElement>() as &VirtualMethods)
}
fn parse_plain_attribute(&self, name: &Atom, value: DOMString) -> AttrValue {
match name {
&atom!("rel") => AttrValue::from_serialized_tokenlist(value),
_ => self.super_type().unwrap().parse_plain_attribute(name, value),
}
}
}
impl HTMLAnchorElementMethods for HTMLAnchorElement {
// https://html.spec.whatwg.org/multipage/#dom-a-text
fn Text(&self) -> DOMString {
self.upcast::<Node>().GetTextContent().unwrap()
}
// https://html.spec.whatwg.org/multipage/#dom-a-text
fn SetText(&self, value: DOMString) {
self.upcast::<Node>().SetTextContent(Some(value))
}
// https://html.spec.whatwg.org/multipage/#dom-a-rellist
fn RelList(&self) -> Root<DOMTokenList> {
self.rel_list.or_init(|| {
DOMTokenList::new(self.upcast(), &atom!("rel"))
})
}
// https://html.spec.whatwg.org/multipage/#dom-a-coords
make_getter!(Coords, "coords");
// https://html.spec.whatwg.org/multipage/#dom-a-coords
make_setter!(SetCoords, "coords");
// https://html.spec.whatwg.org/multipage/#dom-a-name
make_getter!(Name, "name");
// https://html.spec.whatwg.org/multipage/#dom-a-name
make_setter!(SetName, "name");
// https://html.spec.whatwg.org/multipage/#dom-a-rev
make_getter!(Rev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-a-rev
make_setter!(SetRev, "rev");
// https://html.spec.whatwg.org/multipage/#dom-a-shape
make_getter!(Shape, "shape");
// https://html.spec.whatwg.org/multipage/#dom-a-shape
make_setter!(SetShape, "shape");
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hash
fn Hash(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
// Steps 3-4.
UrlHelper::Hash(url)
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hash
fn SetHash(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.scheme() == "javascript" { return; }
// Steps 4-5.
UrlHelper::SetHash(url, value);
// Step 6.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-host
fn Host(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
if url.host().is_none() {
USVString(String::new())
} else {
// Steps 4-5.
UrlHelper::Host(url)
}
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-host
fn SetHost(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.cannot_be_a_base() {
return;
}
// Step 4.
UrlHelper::SetHost(url, value);
// Step 5.
self.update_href();
}
}
<|fim▁hole|> fn Hostname(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
Some(ref url) => {
// Step 4.
UrlHelper::Hostname(url)
}
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hostname
fn SetHostname(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.cannot_be_a_base() {
return;
}
// Step 4.
UrlHelper::SetHostname(url, value);
// Step 5.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn Href(&self) -> USVString {
// Step 1.
self.reinitialize_url();
USVString(match *self.url.borrow() {
None => {
match self.upcast::<Element>().get_attribute(&ns!(), &atom!("href")) {
// Step 3.
None => String::new(),
// Step 4.
Some(attribute) => (**attribute.value()).to_owned(),
}
},
// Step 5.
Some(ref url) => url.as_str().to_owned(),
})
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn SetHref(&self, value: USVString) {
self.upcast::<Element>().set_string_attribute(&atom!("href"),
DOMString::from_string(value.0));
self.set_url();
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-password
fn Password(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Steps 3-4.
Some(ref url) => UrlHelper::Password(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-password
fn SetPassword(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.host().is_none() || url.cannot_be_a_base() {
return;
}
// Step 4.
UrlHelper::SetPassword(url, value);
// Step 5.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-pathname
fn Pathname(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Steps 4-5.
Some(ref url) => UrlHelper::Pathname(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-pathname
fn SetPathname(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.cannot_be_a_base() { return; }
// Step 5.
UrlHelper::SetPathname(url, value);
// Step 6.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-port
fn Port(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 3.
None => USVString(String::new()),
// Step 4.
Some(ref url) => UrlHelper::Port(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-port
fn SetPort(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.host().is_none() ||
url.cannot_be_a_base() ||
url.scheme() == "file" {
return;
}
// Step 4.
UrlHelper::SetPort(url, value);
// Step 5.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-protocol
fn Protocol(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(":".to_owned()),
// Step 3.
Some(ref url) => UrlHelper::Protocol(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-protocol
fn SetProtocol(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 2.
if let Some(url) = self.url.borrow_mut().as_mut() {
// Step 3.
UrlHelper::SetProtocol(url, value);
// Step 4.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-search
fn Search(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(String::new()),
// Step 3.
Some(ref url) => UrlHelper::Search(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-search
fn SetSearch(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
// Steps 4-5.
// TODO add this element's node document character encoding as
// encoding override (as described in the spec)
UrlHelper::SetSearch(url, value);
// Step 6.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-username
fn Username(&self) -> USVString {
// Step 1.
self.reinitialize_url();
match *self.url.borrow() {
// Step 2.
None => USVString(String::new()),
// Step 3.
Some(ref url) => UrlHelper::Username(url)
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-username
fn SetUsername(&self, value: USVString) {
// Step 1.
self.reinitialize_url();
// Step 3.
if let Some(url) = self.url.borrow_mut().as_mut() {
if url.host().is_none() || url.cannot_be_a_base() {
return;
}
// Step 4.
UrlHelper::SetUsername(url, value);
// Step 5.
self.update_href();
}
}
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-href
fn Stringifier(&self) -> DOMString {
DOMString::from(self.Href().0)
}
}
impl Activatable for HTMLAnchorElement {
fn as_element(&self) -> &Element {
self.upcast::<Element>()
}
fn is_instance_activatable(&self) -> bool {
// https://html.spec.whatwg.org/multipage/#hyperlink
// "a [...] element[s] with an href attribute [...] must [..] create a
// hyperlink"
// https://html.spec.whatwg.org/multipage/#the-a-element
// "The activation behaviour of a elements *that create hyperlinks*"
self.upcast::<Element>().has_attribute(&atom!("href"))
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
fn pre_click_activation(&self) {
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
// https://html.spec.whatwg.org/multipage/#run-canceled-activation-steps
fn canceled_activation(&self) {
}
//https://html.spec.whatwg.org/multipage/#the-a-element:activation-behaviour
fn activation_behavior(&self, event: &Event, target: &EventTarget) {
//Step 1. If the node document is not fully active, abort.
let doc = document_from_node(self);
if !doc.is_fully_active() {
return;
}
//TODO: Step 2. Check if browsing context is specified and act accordingly.
//Step 3. Handle <img ismap/>.
let element = self.upcast::<Element>();
let mouse_event = event.downcast::<MouseEvent>().unwrap();
let mut ismap_suffix = None;
if let Some(element) = target.downcast::<Element>() {
if target.is::<HTMLImageElement>() && element.has_attribute(&atom!("ismap")) {
let target_node = element.upcast::<Node>();
let rect = window_from_node(target_node).content_box_query(
target_node.to_trusted_node_address());
ismap_suffix = Some(
format!("?{},{}", mouse_event.ClientX().to_f32().unwrap() - rect.origin.x.to_f32_px(),
mouse_event.ClientY().to_f32().unwrap() - rect.origin.y.to_f32_px())
)
}
}
// Step 4.
        //TODO: Download the link if the `download` attribute is set.
follow_hyperlink(element, ismap_suffix);
}
//TODO:https://html.spec.whatwg.org/multipage/#the-a-element
fn implicit_submission(&self, _ctrlKey: bool, _shiftKey: bool, _altKey: bool, _metaKey: bool) {
}
}
/// https://html.spec.whatwg.org/multipage/#following-hyperlinks-2
fn follow_hyperlink(subject: &Element, hyperlink_suffix: Option<String>) {
// Step 1: replace.
// Step 2: source browsing context.
// Step 3: target browsing context.
// Step 4.
let attribute = subject.get_attribute(&ns!(), &atom!("href")).unwrap();
let mut href = attribute.Value();
// Step 6.
// https://www.w3.org/Bugs/Public/show_bug.cgi?id=28925
if let Some(suffix) = hyperlink_suffix {
href.push_str(&suffix);
}
// Step 4-5.
let document = document_from_node(subject);
let url = match document.url().join(&href) {
Ok(url) => url,
Err(_) => return,
};
// Step 7.
debug!("following hyperlink to {}", url);
let window = document.window();
window.load_url(url);
}<|fim▁end|>
|
// https://html.spec.whatwg.org/multipage/#dom-hyperlink-hostname
|
<|file_name|>plotutils.py<|end_file_name|><|fim▁begin|>import numpy as nm
try:
import matplotlib.pyplot as plt
import matplotlib as mpl
except (ImportError, RuntimeError):
plt = mpl = None
#print 'matplotlib import failed!'
from sfepy.base.base import output, pause
def spy(mtx, eps=None, color='b', **kwargs):
"""
Show sparsity structure of a `scipy.sparse` matrix.
"""
aux = mtx.tocoo()
ij, val = nm.concatenate((aux.row[:,nm.newaxis],
aux.col[:,nm.newaxis]), 1), aux.data
n_item = aux.getnnz()
n_row, n_col = aux.shape
if eps is not None:
output('using eps =', eps)
ij = nm.compress(nm.absolute(val) > eps, ij, 0)
n_item = ij.shape[0]
else:
output('showing all')
output('n_item:', n_item)
if n_item:
args = {'marker' : '.', 'markersize' : 0.5, 'markeredgewidth' : 0.5}
args.update(kwargs)
plt.plot(ij[:,1] + 0.5, ij[:,0] + 0.5, color, linestyle='None',
**args)
plt.axis([-0.5, n_row+0.5, -0.5, n_col+0.5])
plt.axis('image')
plt.xlabel(r'%d x %d: %d nnz, %.2f%% fill'
% (n_row, n_col, n_item, 100. * n_item /
(float(n_row) * float(n_col))))
ax = plt.gca()
ax.set_ylim(ax.get_ylim()[::-1])
def spy_and_show(mtx, **kwargs):
spy(mtx, **kwargs)
plt.show()
##
# 13.12.2005, c
def print_matrix_diff( title, legend, mtx1, mtx2, mtx_da, mtx_dr, iis ):
import copy
print '%s: ir, ic, %s, %s, adiff, rdiff' % ((title,) + tuple( legend ))
aux = copy.copy(mtx_da)
aux.data = nm.ones(mtx_da.data.shape[0])
irs, ics = aux.nonzero()
for ii in iis:
ir, ic = irs[ii], ics[ii]
print '%5d %5d %11.4e %11.4e %9.2e %9.2e'\
% (ir, ic, mtx1[ir,ic], mtx2[ir,ic], mtx_da[ir,ic], mtx_dr[ir,ic] )
print 'total: %d' % len( iis )
##
# 13.12.2005, c
# 14.12.2005
# 15.12.2005
# 18.07.2007
def plot_matrix_diff( mtx1, mtx2, delta, legend, mode ):
eps = 1e-16
print nm.amin( mtx1.data ), nm.amin( mtx2.data )
print nm.amax( mtx1.data ), nm.amax( mtx2.data )
mtx_da = mtx1.copy() # To preserve structure of mtx1.
mtx_da.data[:] = nm.abs( mtx1.data - mtx2.data )<|fim▁hole|> mtx_dr.data[iin] = mtx_da.data[iin] / nm.abs( mtx1.data[iin] )
print nm.amin( mtx_da.data ), nm.amax( mtx_da.data )
print nm.amin( mtx_dr.data ), nm.amax( mtx_dr.data )
epsilon = max( 1e-5, 10 * delta )
print 'epsilon:', epsilon
pause()
ija = nm.where( mtx_da.data > epsilon )[0]
print_matrix_diff( '--- absolute diff', legend,
mtx1, mtx2, mtx_da, mtx_dr, ija )
pause()
iin = nm.where( nm.abs( mtx1.data ) > epsilon )[0]
ij = nm.where( nm.abs( mtx_dr.data[iin] ) > epsilon )[0]
ij = iin[ij]
print_matrix_diff( '--- relative diff', legend,
mtx1, mtx2, mtx_da, mtx_dr, ij )
pause()
ijb = nm.intersect1d( ija, ij )
print_matrix_diff( '--- a-r', legend,
mtx1, mtx2, mtx_da, mtx_dr, ijb )
pause()
ii = nm.argsort( mtx_dr.data[ijb] )
n_s = min( 20, len( ii ) )
ijbs = ijb[ii[-1:-n_s-1:-1]]
print_matrix_diff( '--- a-r 20 biggest (by r)', legend,
mtx1, mtx2, mtx_da, mtx_dr, ijbs )
pause()
if mode < 2: return
h = 100
plt.figure( h ); plt.clf()
plt.axes( [0.04, 0.6, 0.3, 0.3], frameon = True )
spy( mtx_da, epsilon )
plt.title( 'absolute diff' )
plt.axes( [0.68, 0.6, 0.3, 0.3], frameon = True )
iia = nm.where( mtx_dr.data )[0]
mtx_dr.data[nm.setdiff1d( iia, iin )] = 0.0
spy( mtx_dr, epsilon )
plt.title( 'relative diff' )
plt.axes( [0.36, 0.6, 0.3, 0.3], frameon = True )
mtx = mtx_dr.copy()
mtx.data[:] = 0.0
ii = nm.intersect1d( nm.where( mtx_dr.data > epsilon )[0],
nm.where( mtx_da.data > epsilon )[0] )
mtx.data[ii] = 1.0
spy( mtx, epsilon )
plt.title( 'a-r intersection' )
plt.axes( [0.04, 0.08, 0.42, 0.42], frameon = True )
spy( mtx1, epsilon )
plt.title( legend[0] )
plt.axes( [0.54, 0.08, 0.42, 0.42], frameon = True )
spy( mtx2, epsilon )
plt.title( legend[1] )
plt.show()
##
# 02.05.2006, c
def set_axes_font_size( ax, size ):
labels = ax.get_xticklabels() + ax.get_yticklabels()
for label in labels:
label.set_size( size )
##
# 27.09.2006, c
def font_size( size ):
return mpl.font_manager.FontProperties( size = size )
##
# 28.08.2007, c
def iplot( *args, **kwargs ):
plt.ion()
plt.plot( *args, **kwargs )
plt.draw()
plt.ioff()
pause()<|fim▁end|>
|
mtx_dr = mtx_da.copy()
mtx_dr.data[:] = -1
iin = nm.where( nm.abs( mtx1.data ) > eps )[0]
|
<|file_name|>Extension.py<|end_file_name|><|fim▁begin|>class Extension(object):
"""
Base class for creating extensions.
Args:
kwargs[dict]: All key, value pairings are stored as "configuration" options, see getConfigs.
"""
def __init__(self, **kwargs):
#: Configure options
self._configs = kwargs
self._configs.setdefault('headings', ['section', 'subsection', 'subsubsection', 'textbf', 'underline', 'emph'])
def getConfigs(self):
"""
Return the dictionary of configure options.
"""
return self._configs<|fim▁hole|>
def extend(self, translator):
"""
Elements should be added to the storage of the Translator instance within this function.
Args:
translator[Translator]: The object to be used for converting the html.
"""
pass<|fim▁end|>
| |
<|file_name|>0059_session_has_finished.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2017-12-18 07:22
from __future__ import unicode_literals
from django.db import migrations, models<|fim▁hole|>
dependencies = [
('exams', '0058_fill_explanation_is_contribution'),
]
operations = [
migrations.AddField(
model_name='session',
name='has_finished',
field=models.NullBooleanField(default=None),
),
]<|fim▁end|>
|
class Migration(migrations.Migration):
|
<|file_name|>shader.rs<|end_file_name|><|fim▁begin|>use gl;
use version::Version;
use version::Api;
use CapabilitiesSource;
use backend::Facade;
use context::Context;
use ContextExt;
use std::{ffi, mem, ptr};
use std::rc::Rc;
use GlObject;
use Handle;
use program::ProgramCreationError;
/// A single, compiled but unlinked, shader.
pub struct Shader {
context: Rc<Context>,
id: Handle,
}
impl GlObject for Shader {
type Id = Handle;
#[inline]
fn get_id(&self) -> Handle {
self.id
}
}
impl Drop for Shader {
fn drop(&mut self) {
let ctxt = self.context.make_current();
unsafe {
match self.id {
Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.DeleteShader(id);
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.DeleteObjectARB(id);
}
}
}
}
}
/// Builds an individual shader.
pub fn build_shader<F>(facade: &F, shader_type: gl::types::GLenum, source_code: &str)
-> Result<Shader, ProgramCreationError> where F: Facade
{
unsafe {
let mut ctxt = facade.get_context().make_current();
if ctxt.capabilities.supported_glsl_versions.is_empty() {
return Err(ProgramCreationError::CompilationNotSupported);
}
if !check_shader_type_compatibility(&mut ctxt, shader_type) {
return Err(ProgramCreationError::ShaderTypeNotSupported);
}
let source_code = ffi::CString::new(source_code.as_bytes()).unwrap();
let id = if ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0)
{
Handle::Id(ctxt.gl.CreateShader(shader_type))
} else if ctxt.extensions.gl_arb_shader_objects {
Handle::Handle(ctxt.gl.CreateShaderObjectARB(shader_type))
} else {
unreachable!()
};
if id == Handle::Id(0) || id == Handle::Handle(0 as gl::types::GLhandleARB) {
return Err(ProgramCreationError::ShaderTypeNotSupported);
}
match id {
Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.ShaderSource(id, 1, [ source_code.as_ptr() ].as_ptr(), ptr::null());
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.ShaderSourceARB(id, 1, [ source_code.as_ptr() ].as_ptr(), ptr::null());
}
}
// compiling
{
ctxt.report_debug_output_errors.set(false);
match id {
Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0)||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.CompileShader(id);
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.CompileShaderARB(id);
}
}
ctxt.report_debug_output_errors.set(true);
}
// checking compilation success by reading a flag on the shader
let compilation_success = {
let mut compilation_success: gl::types::GLint = mem::uninitialized();<|fim▁hole|> Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.GetShaderiv(id, gl::COMPILE_STATUS, &mut compilation_success);
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.GetObjectParameterivARB(id, gl::OBJECT_COMPILE_STATUS_ARB,
&mut compilation_success);
}
}
compilation_success
};
if compilation_success == 1 {
Ok(Shader {
context: facade.get_context().clone(),
id: id
})
} else {
// compilation error
let mut error_log_size: gl::types::GLint = mem::uninitialized();
match id {
Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.GetShaderiv(id, gl::INFO_LOG_LENGTH, &mut error_log_size);
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.GetObjectParameterivARB(id, gl::OBJECT_INFO_LOG_LENGTH_ARB,
&mut error_log_size);
}
}
let mut error_log: Vec<u8> = Vec::with_capacity(error_log_size as usize);
match id {
Handle::Id(id) => {
assert!(ctxt.version >= &Version(Api::Gl, 2, 0) ||
ctxt.version >= &Version(Api::GlEs, 2, 0));
ctxt.gl.GetShaderInfoLog(id, error_log_size, &mut error_log_size,
error_log.as_mut_ptr() as *mut gl::types::GLchar);
},
Handle::Handle(id) => {
assert!(ctxt.extensions.gl_arb_shader_objects);
ctxt.gl.GetInfoLogARB(id, error_log_size, &mut error_log_size,
error_log.as_mut_ptr() as *mut gl::types::GLchar);
}
}
error_log.set_len(error_log_size as usize);
match String::from_utf8(error_log) {
Ok(msg) => Err(ProgramCreationError::CompilationError(msg)),
Err(_) => Err(
ProgramCreationError::CompilationError("Could not convert the log \
message to UTF-8".to_owned())
),
}
}
}
}
pub fn check_shader_type_compatibility<C>(ctxt: &C, shader_type: gl::types::GLenum)
-> bool where C: CapabilitiesSource
{
match shader_type {
gl::VERTEX_SHADER | gl::FRAGMENT_SHADER => (),
gl::GEOMETRY_SHADER => {
if !(ctxt.get_version() >= &Version(Api::Gl, 3, 2))
&& !(ctxt.get_version() >= &Version(Api::GlEs, 3, 2))
&& !ctxt.get_extensions().gl_arb_geometry_shader4
&& !ctxt.get_extensions().gl_ext_geometry_shader4
&& !ctxt.get_extensions().gl_ext_geometry_shader
&& !ctxt.get_extensions().gl_oes_geometry_shader
{
return false;
}
},
gl::TESS_CONTROL_SHADER | gl::TESS_EVALUATION_SHADER => {
if !(ctxt.get_version() >= &Version(Api::Gl, 4, 0))
&& !(ctxt.get_version() >= &Version(Api::GlEs, 3, 2))
&& !ctxt.get_extensions().gl_arb_tessellation_shader
&& !ctxt.get_extensions().gl_oes_tessellation_shader
{
return false;
}
},
gl::COMPUTE_SHADER => {
if !(ctxt.get_version() >= &Version(Api::Gl, 4, 3))
&& !(ctxt.get_version() >= &Version(Api::GlEs, 3, 1))
&& !ctxt.get_extensions().gl_arb_compute_shader
{
return false;
}
},
_ => unreachable!()
};
true
}<|fim▁end|>
|
match id {
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
__author__ = 'mkaplenko'
import httplib
import time
class MobilMoneySms(object):
def __init__(self, phone_to, message):
self.phone_to = phone_to<|fim▁hole|> self.message = message
self.sync = int(time.time()*100)
class MobilMoneySmsClient(object):
connection_host = 'gate.mobilmoney.ru'
response = None
sms = None
sync = 1
def __init__(self, login, password, originator):
self.login = login
self.password = password
self.originator = originator
def register_sms(self, sms_instance):
self.sms = sms_instance
def request_body(self):
data_kwargs = {
'login': self.login,
'password': self.password,
'originator': self.originator,
'phone_to': self.sms.phone_to,
'message': self.sms.message,
'sync': unicode(self.sms.sync)
}
data = u'''
<?xml version="1.0" encoding="utf-8"?>
<request method="SendSMSFull">
<login>{login}</login>
<pwd>{password}</pwd>
<originator>{originator}</originator>
<phone_to>{phone_to}</phone_to>
<message>{message}</message>
<sync>{sync}</sync>
</request>
'''.format(**data_kwargs).encode('utf-8')
return data
def send_sms(self):
connection = httplib.HTTPConnection(self.connection_host)
connection.request('POST', '/', self.request_body())
self.response = connection.getresponse()
@property
def answer(self):
return self.response.read() if self.response else None
if __name__ == '__main__':
sms = MobilMoneySms('+79151234567', u'Привет мир! Я тестирую смс!')
client = MobilMoneySmsClient('my_login', 'my_password', 'my_originator_name')
client.register_sms(sms)
client.send_sms()
print(client.answer)<|fim▁end|>
|