prompt | completion
---|---|
large_string (lengths 70–991k) | large_string (lengths 0–1.02k) |
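Each row below pairs a fill-in-the-middle (FIM) prompt (a source file wrapped in `<|fim▁begin|>`...`<|fim▁end|>` with a `<|fim▁hole|>` marker where text was removed) with the completion text that belongs in the hole. As a rough illustration only, not part of the dataset itself, a row can be spliced back into a full file along these lines; the marker strings are copied from the rows below and the helper name is made up:

```python
# Minimal sketch: splice a row's completion back into its FIM prompt.
# The marker strings are copied from the rows below; the helper name is illustrative.
FIM_BEGIN, FIM_HOLE, FIM_END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    # Drop the <|file_name|> header up to and including the begin marker.
    body = prompt.split(FIM_BEGIN, 1)[1]
    # Remove the end marker, then place the completion where the hole was.
    prefix, suffix = body.replace(FIM_END, "").split(FIM_HOLE, 1)
    return prefix + completion + suffix
```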
<|file_name|>test_client_server.py<|end_file_name|><|fim▁begin|>import pytest
<|fim▁hole|>class TestClientServer:
def test_server_port_property(self):
port = 1050
server = Server(port=port)
assert server.port == port
def test_client_addresses_property(self):
client = Client()
addresses = (('10.0.0.1', 1567), ('10.0.0.2', 1568), ('10.0.0.3', 1569))
for ip, port in addresses:
client.connect(ip, port)
assert client.addresses == addresses<|fim▁end|> | from zeroless import (Server, Client)
|
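For example, splicing the first row's completion (`from zeroless import (Server, Client)`) into its hole, between `import pytest` and the class definition, gives roughly the following file (assembled by hand here purely for illustration):

```python
import pytest
from zeroless import (Server, Client)


class TestClientServer:
    def test_server_port_property(self):
        port = 1050
        server = Server(port=port)
        assert server.port == port

    def test_client_addresses_property(self):
        client = Client()
        addresses = (('10.0.0.1', 1567), ('10.0.0.2', 1568), ('10.0.0.3', 1569))
        for ip, port in addresses:
            client.connect(ip, port)
        assert client.addresses == addresses
```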
<|file_name|>vz-projector.ts<|end_file_name|><|fim▁begin|>/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
import {ColorOption, ColumnStats, DataPoint, DataProto, DataSet, PointAccessors3D, PointMetadata, Projection, SpriteAndMetadataInfo, State, stateGetAccessorDimensions} from './data';
import {DataProvider, EmbeddingInfo, ServingMode} from './data-provider';
import {DemoDataProvider} from './data-provider-demo';
import {ProtoDataProvider} from './data-provider-proto';
import {ServerDataProvider} from './data-provider-server';
import {HoverContext, HoverListener} from './hoverContext';
import * as knn from './knn';
import * as logging from './logging';
import {ProjectorScatterPlotAdapter} from './projectorScatterPlotAdapter';
import {Mode, ScatterPlot} from './scatterPlot';
import {ScatterPlotVisualizer3DLabels} from './scatterPlotVisualizer3DLabels';
import {ScatterPlotVisualizerCanvasLabels} from './scatterPlotVisualizerCanvasLabels';
import {ScatterPlotVisualizerSprites} from './scatterPlotVisualizerSprites';
import {ScatterPlotVisualizerTraces} from './scatterPlotVisualizerTraces';
import {SelectionChangedListener, SelectionContext} from './selectionContext';
import {BookmarkPanel} from './vz-projector-bookmark-panel';
import {DataPanel} from './vz-projector-data-panel';
import {InspectorPanel} from './vz-projector-inspector-panel';
import {MetadataCard} from './vz-projector-metadata-card';
import {ProjectionsPanel} from './vz-projector-projections-panel';
// tslint:disable-next-line:no-unused-variable
import {PolymerElement, PolymerHTMLElement} from './vz-projector-util';
/**
* The minimum number of dimensions the data should have to automatically
* decide to normalize the data.
*/
const THRESHOLD_DIM_NORMALIZE = 50;
const POINT_COLOR_MISSING = 'black';
export let ProjectorPolymer = PolymerElement({
is: 'vz-projector',
properties: {
routePrefix: String,
dataProto: {type: String, observer: '_dataProtoChanged'},
servingMode: String,
projectorConfigJsonPath: String
}
});
const INDEX_METADATA_FIELD = '__index__';
export class Projector extends ProjectorPolymer implements SelectionContext,
HoverContext {
// The working subset of the data source's original data set.
dataSet: DataSet;
servingMode: ServingMode;
// The path to the projector config JSON file for demo mode.
projectorConfigJsonPath: string;
private selectionChangedListeners: SelectionChangedListener[];
private hoverListeners: HoverListener[];
private originalDataSet: DataSet;
private dom: d3.Selection<any>;
private projectorScatterPlotAdapter: ProjectorScatterPlotAdapter;
private scatterPlot: ScatterPlot;
private dim: number;
private selectedPointIndices: number[];
private neighborsOfFirstPoint: knn.NearestEntry[];
private hoverPointIndex: number;
private dataProvider: DataProvider;
private inspectorPanel: InspectorPanel;
private selectedColorOption: ColorOption;
private selectedLabelOption: string;
private routePrefix: string;
private normalizeData: boolean;
private selectedProjection: Projection;
private selectedProjectionPointAccessors: PointAccessors3D;
/** Polymer component panels */
private dataPanel: DataPanel;
private bookmarkPanel: BookmarkPanel;<|fim▁hole|> private metadataCard: MetadataCard;
private statusBar: d3.Selection<HTMLElement>;
ready() {
this.selectionChangedListeners = [];
this.hoverListeners = [];
this.selectedPointIndices = [];
this.neighborsOfFirstPoint = [];
this.dom = d3.select(this);
logging.setDomContainer(this);
this.dataPanel = this.$['data-panel'] as DataPanel;
this.inspectorPanel = this.$['inspector-panel'] as InspectorPanel;
this.inspectorPanel.initialize(this);
this.projectionsPanel = this.$['projections-panel'] as ProjectionsPanel;
this.projectionsPanel.initialize(this);
this.metadataCard = this.$['metadata-card'] as MetadataCard;
this.statusBar = this.dom.select('#status-bar');
this.bookmarkPanel = this.$['bookmark-panel'] as BookmarkPanel;
this.scopeSubtree(this.$$('#wrapper-notify-msg'), true);
this.setupUIControls();
this.initializeDataProvider();
}
setSelectedLabelOption(labelOption: string) {
this.selectedLabelOption = labelOption;
let labelAccessor = (i: number): string => {
return this.dataSet.points[i]
.metadata[this.selectedLabelOption] as string;
};
this.metadataCard.setLabelOption(this.selectedLabelOption);
this.scatterPlot.setLabelAccessor(labelAccessor);
this.scatterPlot.render();
}
setSelectedColorOption(colorOption: ColorOption) {
this.selectedColorOption = colorOption;
this.updateScatterPlotAttributes();
this.scatterPlot.render();
}
setNormalizeData(normalizeData: boolean) {
this.normalizeData = normalizeData;
this.setCurrentDataSet(this.originalDataSet.getSubset());
}
updateDataSet(
ds: DataSet, spriteAndMetadata?: SpriteAndMetadataInfo,
metadataFile?: string) {
this.originalDataSet = ds;
if (this.scatterPlot == null || this.originalDataSet == null) {
// We are not ready yet.
return;
}
this.normalizeData = this.originalDataSet.dim[1] >= THRESHOLD_DIM_NORMALIZE;
spriteAndMetadata = spriteAndMetadata || {};
if (spriteAndMetadata.pointsInfo == null) {
let [pointsInfo, stats] = this.makeDefaultPointsInfoAndStats(ds.points);
spriteAndMetadata.pointsInfo = pointsInfo;
spriteAndMetadata.stats = stats;
}
ds.mergeMetadata(spriteAndMetadata);
this.dataPanel.setNormalizeData(this.normalizeData);
this.setCurrentDataSet(this.originalDataSet.getSubset());
this.inspectorPanel.datasetChanged();
this.inspectorPanel.metadataChanged(spriteAndMetadata);
this.projectionsPanel.metadataChanged(spriteAndMetadata);
this.dataPanel.metadataChanged(spriteAndMetadata, metadataFile);
// Set the container to a fixed height, otherwise in Colab the
// height can grow indefinitely.
let container = this.dom.select('#container');
container.style('height', container.property('clientHeight') + 'px');
}
setSelectedTensor(run: string, tensorInfo: EmbeddingInfo) {
this.bookmarkPanel.setSelectedTensor(run, tensorInfo);
}
/**
* Registers a listener to be called any time the selected point set changes.
*/
registerSelectionChangedListener(listener: SelectionChangedListener) {
this.selectionChangedListeners.push(listener);
}
filterDataset() {
let indices = this.selectedPointIndices.concat(
this.neighborsOfFirstPoint.map(n => n.index));
let selectionSize = this.selectedPointIndices.length;
this.setCurrentDataSet(this.dataSet.getSubset(indices));
this.adjustSelectionAndHover(d3.range(selectionSize));
}
resetFilterDataset() {
let originalPointIndices = this.selectedPointIndices.map(localIndex => {
return this.dataSet.points[localIndex].index;
});
this.setCurrentDataSet(this.originalDataSet.getSubset());
this.updateScatterPlotPositions();
this.adjustSelectionAndHover(originalPointIndices);
}
/**
* Used by clients to indicate that a selection has occurred.
*/
notifySelectionChanged(newSelectedPointIndices: number[]) {
this.selectedPointIndices = newSelectedPointIndices;
let neighbors: knn.NearestEntry[] = [];
if (newSelectedPointIndices.length === 1) {
neighbors = this.dataSet.findNeighbors(
newSelectedPointIndices[0], this.inspectorPanel.distFunc,
this.inspectorPanel.numNN);
this.metadataCard.updateMetadata(
this.dataSet.points[newSelectedPointIndices[0]].metadata);
} else {
this.metadataCard.updateMetadata(null);
}
this.selectionChangedListeners.forEach(
l => l(this.selectedPointIndices, neighbors));
}
/**
* Registers a listener to be called any time the mouse hovers over a point.
*/
registerHoverListener(listener: HoverListener) {
this.hoverListeners.push(listener);
}
/**
* Used by clients to indicate that a hover is occurring.
*/
notifyHoverOverPoint(pointIndex: number) {
this.hoverListeners.forEach(l => l(pointIndex));
}
_dataProtoChanged(dataProtoString: string) {
let dataProto =
dataProtoString ? JSON.parse(dataProtoString) as DataProto : null;
this.initializeDataProvider(dataProto);
}
private makeDefaultPointsInfoAndStats(points: DataPoint[]):
[PointMetadata[], ColumnStats[]] {
let pointsInfo: PointMetadata[] = [];
points.forEach(p => {
let pointInfo: PointMetadata = {};
pointInfo[INDEX_METADATA_FIELD] = p.index;
pointsInfo.push(pointInfo);
});
let stats: ColumnStats[] = [{
name: INDEX_METADATA_FIELD,
isNumeric: false,
tooManyUniqueValues: true,
min: 0,
max: pointsInfo.length - 1
}];
return [pointsInfo, stats];
}
private initializeDataProvider(dataProto?: DataProto) {
if (this.servingMode === 'demo') {
this.dataProvider = new DemoDataProvider(this.projectorConfigJsonPath);
} else if (this.servingMode === 'server') {
if (!this.routePrefix) {
throw 'route-prefix is a required parameter';
}
this.dataProvider = new ServerDataProvider(this.routePrefix);
} else if (this.servingMode === 'proto' && dataProto != null) {
this.dataProvider = new ProtoDataProvider(dataProto);
}
this.dataPanel.initialize(this, this.dataProvider);
this.bookmarkPanel.initialize(this, this.dataProvider);
}
private getLegendPointColorer(colorOption: ColorOption):
(index: number) => string {
if ((colorOption == null) || (colorOption.map == null)) {
return null;
}
const colorer = (i: number) => {
let value =
this.dataSet.points[i].metadata[this.selectedColorOption.name];
if (value == null) {
return POINT_COLOR_MISSING;
}
return colorOption.map(value);
};
return colorer;
}
private get3DLabelModeButton(): any {
return this.querySelector('#labels3DMode');
}
private get3DLabelMode(): boolean {
const label3DModeButton = this.get3DLabelModeButton();
return (label3DModeButton as any).active;
}
private getSpriteImageMode(): boolean {
return this.dataSet && this.dataSet.spriteAndMetadataInfo &&
this.dataSet.spriteAndMetadataInfo.spriteImage != null;
}
adjustSelectionAndHover(selectedPointIndices: number[], hoverIndex?: number) {
this.notifySelectionChanged(selectedPointIndices);
this.notifyHoverOverPoint(hoverIndex);
this.scatterPlot.setMode(Mode.HOVER);
}
private unsetCurrentDataSet() {
this.dataSet.stopTSNE();
}
private setCurrentDataSet(ds: DataSet) {
this.adjustSelectionAndHover([]);
if (this.dataSet != null) {
this.unsetCurrentDataSet();
}
this.dataSet = ds;
if (this.normalizeData) {
this.dataSet.normalize();
}
this.dim = this.dataSet.dim[1];
this.dom.select('span.numDataPoints').text(this.dataSet.dim[0]);
this.dom.select('span.dim').text(this.dataSet.dim[1]);
this.selectedProjectionPointAccessors = null;
this.projectionsPanel.dataSetUpdated(
this.dataSet, this.originalDataSet, this.dim);
this.scatterPlot.setCameraParametersForNextCameraCreation(null, true);
}
private setupUIControls() {
// View controls
this.querySelector('#reset-zoom').addEventListener('click', () => {
this.scatterPlot.resetZoom();
this.scatterPlot.startOrbitAnimation();
});
let selectModeButton = this.querySelector('#selectMode');
selectModeButton.addEventListener('click', (event) => {
this.scatterPlot.setMode(
(selectModeButton as any).active ? Mode.SELECT : Mode.HOVER);
});
let nightModeButton = this.querySelector('#nightDayMode');
nightModeButton.addEventListener('click', () => {
this.scatterPlot.setDayNightMode((nightModeButton as any).active);
});
const labels3DModeButton = this.get3DLabelModeButton();
labels3DModeButton.addEventListener('click', () => {
this.createVisualizers(this.get3DLabelMode());
this.updateScatterPlotAttributes();
this.scatterPlot.render();
});
window.addEventListener('resize', () => {
let container = this.dom.select('#container');
let parentHeight =
(container.node().parentNode as HTMLElement).clientHeight;
container.style('height', parentHeight + 'px');
this.scatterPlot.resize();
});
this.projectorScatterPlotAdapter = new ProjectorScatterPlotAdapter();
this.scatterPlot = new ScatterPlot(
this.getScatterContainer(),
i => '' + this.dataSet.points[i].metadata[this.selectedLabelOption],
this, this);
this.createVisualizers(false);
this.scatterPlot.onCameraMove(
(cameraPosition: THREE.Vector3, cameraTarget: THREE.Vector3) =>
this.bookmarkPanel.clearStateSelection());
this.registerHoverListener(
(hoverIndex: number) => this.onHover(hoverIndex));
this.registerSelectionChangedListener(
(selectedPointIndices: number[],
neighborsOfFirstPoint: knn.NearestEntry[]) =>
this.onSelectionChanged(
selectedPointIndices, neighborsOfFirstPoint));
this.scatterPlot.resize();
this.scatterPlot.render();
}
private onHover(hoverIndex: number) {
this.hoverPointIndex = hoverIndex;
let hoverText = null;
if (hoverIndex != null) {
const point = this.dataSet.points[hoverIndex];
if (point.metadata[this.selectedLabelOption]) {
hoverText = point.metadata[this.selectedLabelOption].toString();
}
}
this.updateScatterPlotAttributes();
this.scatterPlot.render();
if (this.selectedPointIndices.length === 0) {
this.statusBar.style('display', hoverText ? null : 'none');
this.statusBar.text(hoverText);
}
}
private updateScatterPlotPositions() {
if (this.dataSet == null) {
return;
}
if (this.selectedProjectionPointAccessors == null) {
return;
}
const newPositions =
this.projectorScatterPlotAdapter.generatePointPositionArray(
this.dataSet, this.selectedProjectionPointAccessors);
this.scatterPlot.setPointPositions(this.dataSet, newPositions);
}
private updateScatterPlotAttributes() {
const dataSet = this.dataSet;
const selectedSet = this.selectedPointIndices;
const hoverIndex = this.hoverPointIndex;
const neighbors = this.neighborsOfFirstPoint;
const pointColorer = this.getLegendPointColorer(this.selectedColorOption);
const adapter = this.projectorScatterPlotAdapter;
const pointColors = adapter.generatePointColorArray(
dataSet, pointColorer, selectedSet, neighbors, hoverIndex,
this.get3DLabelMode(), this.getSpriteImageMode());
const pointScaleFactors = adapter.generatePointScaleFactorArray(
dataSet, selectedSet, neighbors, hoverIndex);
const labels = adapter.generateVisibleLabelRenderParams(
dataSet, selectedSet, neighbors, hoverIndex);
const traceColors =
adapter.generateLineSegmentColorMap(dataSet, pointColorer);
const traceOpacities =
adapter.generateLineSegmentOpacityArray(dataSet, selectedSet);
const traceWidths =
adapter.generateLineSegmentWidthArray(dataSet, selectedSet);
this.scatterPlot.setPointColors(pointColors);
this.scatterPlot.setPointScaleFactors(pointScaleFactors);
this.scatterPlot.setLabels(labels);
this.scatterPlot.setTraceColors(traceColors);
this.scatterPlot.setTraceOpacities(traceOpacities);
this.scatterPlot.setTraceWidths(traceWidths);
}
private getScatterContainer(): d3.Selection<any> {
return this.dom.select('#scatter');
}
private createVisualizers(inLabels3DMode: boolean) {
const scatterPlot = this.scatterPlot;
scatterPlot.removeAllVisualizers();
if (inLabels3DMode) {
scatterPlot.addVisualizer(new ScatterPlotVisualizer3DLabels());
} else {
scatterPlot.addVisualizer(new ScatterPlotVisualizerSprites());
scatterPlot.addVisualizer(
new ScatterPlotVisualizerCanvasLabels(this.getScatterContainer()));
}
scatterPlot.addVisualizer(new ScatterPlotVisualizerTraces());
}
private onSelectionChanged(
selectedPointIndices: number[],
neighborsOfFirstPoint: knn.NearestEntry[]) {
this.selectedPointIndices = selectedPointIndices;
this.neighborsOfFirstPoint = neighborsOfFirstPoint;
let totalNumPoints =
this.selectedPointIndices.length + neighborsOfFirstPoint.length;
this.statusBar.text(`Selected ${totalNumPoints} points`)
.style('display', totalNumPoints > 0 ? null : 'none');
this.inspectorPanel.updateInspectorPane(
selectedPointIndices, neighborsOfFirstPoint);
this.updateScatterPlotAttributes();
this.scatterPlot.render();
}
setProjection(
projection: Projection, dimensionality: number,
pointAccessors: PointAccessors3D) {
this.selectedProjection = projection;
this.selectedProjectionPointAccessors = pointAccessors;
this.scatterPlot.setDimensions(dimensionality);
if (this.dataSet.projectionCanBeRendered(projection)) {
this.updateScatterPlotAttributes();
this.notifyProjectionsUpdated();
}
this.scatterPlot.setCameraParametersForNextCameraCreation(null, false);
}
notifyProjectionsUpdated() {
this.updateScatterPlotPositions();
this.scatterPlot.render();
}
/**
* Gets the current view of the embedding and saves it as a State object.
*/
getCurrentState(): State {
const state = new State();
// Save the individual datapoint projections.
state.projections = [];
for (let i = 0; i < this.dataSet.points.length; i++) {
const point = this.dataSet.points[i];
const projections: {[key: string]: number} = {};
const keys = Object.keys(point.projections);
for (let j = 0; j < keys.length; ++j) {
projections[keys[j]] = point.projections[keys[j]];
}
state.projections.push(projections);
}
state.selectedProjection = this.selectedProjection;
state.dataSetDimensions = this.dataSet.dim;
state.tSNEIteration = this.dataSet.tSNEIteration;
state.selectedPoints = this.selectedPointIndices;
state.cameraDef = this.scatterPlot.getCameraDef();
state.selectedColorOptionName = this.dataPanel.selectedColorOptionName;
state.selectedLabelOption = this.selectedLabelOption;
this.projectionsPanel.populateBookmarkFromUI(state);
return state;
}
/** Loads a State object into the world. */
loadState(state: State) {
for (let i = 0; i < state.projections.length; i++) {
const point = this.dataSet.points[i];
const projection = state.projections[i];
const keys = Object.keys(projection);
for (let j = 0; j < keys.length; ++j) {
point.projections[keys[j]] = projection[keys[j]];
}
}
this.dataSet.hasTSNERun = (state.selectedProjection === 'tsne');
this.dataSet.tSNEIteration = state.tSNEIteration;
this.projectionsPanel.restoreUIFromBookmark(state);
this.dataPanel.selectedColorOptionName = state.selectedColorOptionName;
this.selectedLabelOption = state.selectedLabelOption;
this.scatterPlot.setCameraParametersForNextCameraCreation(
state.cameraDef, false);
{
const dimensions = stateGetAccessorDimensions(state);
const accessors =
this.dataSet.getPointAccessors(state.selectedProjection, dimensions);
this.setProjection(
state.selectedProjection, dimensions.length, accessors);
}
this.notifySelectionChanged(state.selectedPoints);
}
}
document.registerElement(Projector.prototype.is, Projector);<|fim▁end|> | private projectionsPanel: ProjectionsPanel; |
<|file_name|>dropout.py<|end_file_name|><|fim▁begin|>import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
import sys<|fim▁hole|>have_data = os.path.exists('MNIST_data/train-images-idx3-ubyte.gz')
if not have_data:
download('./MNIST_data')
# load data
mnist = input_data.read_data_sets("MNIST_data", one_hot=True)
# batch
batch_size = 64
n_batch = mnist.train.num_examples // batch_size
# in [60000, 28 * 28] out [60000, 10]
x = tf.placeholder(tf.float32, [None,784])
y = tf.placeholder(tf.float32, [None,10])
keep_prob = tf.placeholder(tf.float32)
# Network architecture: 784-1000-500-10
w1 = tf.Variable(tf.truncated_normal([784,1000], stddev=0.1))
b1 = tf.Variable(tf.zeros([1000]) + 0.1)
l1 = tf.nn.tanh(tf.matmul(x, w1) + b1)
l1_drop = tf.nn.dropout(l1, keep_prob)
w2 = tf.Variable(tf.truncated_normal([1000, 500], stddev=0.1))
b2 = tf.Variable(tf.zeros([500]) + 0.1)
l2 = tf.nn.tanh(tf.matmul(l1_drop, w2) + b2)
l2_drop = tf.nn.dropout(l2, keep_prob)
w3 = tf.Variable(tf.truncated_normal([500, 10], stddev=0.1))
b3 = tf.Variable(tf.zeros([10]) + 0.1)
prediction = tf.nn.softmax(tf.matmul(l2_drop, w3) + b3)
# Quadratic (MSE) cost function - for regression problems
# loss = tf.losses.mean_squared_error(y, prediction)
# Cross-entropy loss - for classification problems
loss = tf.losses.softmax_cross_entropy(y, prediction)
# Gradient descent optimizer
train = tf.train.GradientDescentOptimizer(0.5).minimize(loss)
# save result to a bool array
# 1000 0000 00 -> 0
# 0100 0000 00 -> 1
# ...
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(prediction, 1))
# correct rate, bool -> float ->mean
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
with tf.Session() as sess:
# init variable
sess.run(tf.global_variables_initializer())
for epoch in range(10):
for batch in range(n_batch):
# get a batch data and label
batch_x, batch_y = mnist.train.next_batch(batch_size)
sess.run(train, feed_dict={x:batch_x, y:batch_y, keep_prob:0.5})
acc = sess.run(accuracy, feed_dict={x:mnist.test.images, y:mnist.test.labels, keep_prob:1.0})
train_acc = sess.run(accuracy, feed_dict={x:mnist.train.images, y:mnist.train.labels, keep_prob:1.0})
print("Iter " + str(epoch + 1) + ", Testing Accuracy " + str(acc) + ", Training Accuracy " + str(train_acc))<|fim▁end|> | sys.path.append('./MNIST_data')
import os.path
from download import download |
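In the row above, `keep_prob` is fed as 0.5 during training and 1.0 at evaluation. `tf.nn.dropout` uses inverted dropout (kept activations are scaled up by `1/keep_prob` while training), which is why no rescaling is needed at test time. A small NumPy sketch of that idea, illustrative only and not part of the row:

```python
import numpy as np

def inverted_dropout(x, keep_prob, training):
    """Inverted dropout: scale kept units by 1/keep_prob while training,
    so inference (keep_prob=1.0 above) is a plain identity pass."""
    if not training or keep_prob >= 1.0:
        return x  # evaluation path, matching keep_prob=1.0 in the feed_dict above
    mask = np.random.rand(*x.shape) < keep_prob
    return x * mask / keep_prob

x = np.ones((4, 3))
# The expected activation is preserved on average (~1.0 here).
print(inverted_dropout(x, keep_prob=0.5, training=True).mean())
```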
<|file_name|>operation.go<|end_file_name|><|fim▁begin|>package subscription
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is regenerated.
import (
"context"
"github.com/Azure/go-autorest/autorest"
"github.com/Azure/go-autorest/autorest/azure"
"github.com/Azure/go-autorest/tracing"
"net/http"
)
// OperationClient is the subscription client
type OperationClient struct {
BaseClient
}
// NewOperationClient creates an instance of the OperationClient client.
func NewOperationClient() OperationClient {
return NewOperationClientWithBaseURI(DefaultBaseURI)
}
// NewOperationClientWithBaseURI creates an instance of the OperationClient client using a custom endpoint. Use this
// when interacting with an Azure cloud that uses a non-standard base URI (sovereign clouds, Azure stack).
func NewOperationClientWithBaseURI(baseURI string) OperationClient {
return OperationClient{NewWithBaseURI(baseURI)}
}
// Get get the status of the pending Microsoft.Subscription API operations.
// Parameters:
// operationID - the operation ID, which can be found from the Location field in the generate recommendation
// response header.
func (client OperationClient) Get(ctx context.Context, operationID string) (result CreationResult, err error) {
if tracing.IsEnabled() {
ctx = tracing.StartSpan(ctx, fqdn+"/OperationClient.Get")
defer func() {
sc := -1
if result.Response.Response != nil {
sc = result.Response.Response.StatusCode
}
tracing.EndSpan(ctx, sc, err)
}()
}
req, err := client.GetPreparer(ctx, operationID)
if err != nil {
err = autorest.NewErrorWithError(err, "subscription.OperationClient", "Get", nil, "Failure preparing request")
return
}
resp, err := client.GetSender(req)
if err != nil {
result.Response = autorest.Response{Response: resp}
err = autorest.NewErrorWithError(err, "subscription.OperationClient", "Get", resp, "Failure sending request")
return
}
result, err = client.GetResponder(resp)
if err != nil {
err = autorest.NewErrorWithError(err, "subscription.OperationClient", "Get", resp, "Failure responding to request")
return
}
return
}
<|fim▁hole|> }
const APIVersion = "2019-10-01-preview"
queryParameters := map[string]interface{}{
"api-version": APIVersion,
}
preparer := autorest.CreatePreparer(
autorest.AsGet(),
autorest.WithBaseURL(client.BaseURI),
autorest.WithPathParameters("/providers/Microsoft.Subscription/subscriptionOperations/{operationId}", pathParameters),
autorest.WithQueryParameters(queryParameters))
return preparer.Prepare((&http.Request{}).WithContext(ctx))
}
// GetSender sends the Get request. The method will close the
// http.Response Body if it receives an error.
func (client OperationClient) GetSender(req *http.Request) (*http.Response, error) {
return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
}
// GetResponder handles the response to the Get request. The method always
// closes the http.Response Body.
func (client OperationClient) GetResponder(resp *http.Response) (result CreationResult, err error) {
err = autorest.Respond(
resp,
azure.WithErrorUnlessStatusCode(http.StatusOK, http.StatusAccepted),
autorest.ByUnmarshallingJSON(&result),
autorest.ByClosing())
result.Response = autorest.Response{Response: resp}
return
}<|fim▁end|> | // GetPreparer prepares the Get request.
func (client OperationClient) GetPreparer(ctx context.Context, operationID string) (*http.Request, error) {
pathParameters := map[string]interface{}{
"operationId": autorest.Encode("path", operationID), |
<|file_name|>ControllerMountSpyPlugin.ts<|end_file_name|><|fim▁begin|>import { PluginConfig, exportDependency, PluginConstructor } from '../../core/PluginConfig'
import { memoize } from 'lodash';
export type MountSpyCondition = (x: React.Component) => boolean
export interface MountSpyObserver {
condition: MountSpyCondition
resolve: () => void
}
export interface ControllerMountSpyPlugin extends PluginConfig {
waitFor(condition: MountSpyCondition): Promise<void>
}
export interface ControllerMountSpy {
didMount(controller: React.Component): void
}
export const ControllerMountSpy = 'ControllerMountSpy'
export const mountSpy: () => PluginConstructor<ControllerMountSpyPlugin> = memoize(() => {
class ControllerMountSpyPluginImpl extends PluginConfig implements ControllerMountSpyPlugin {
private observers: MountSpyObserver[] = []
waitFor(condition: MountSpyCondition): Promise<void> {
return new Promise((resolve) => {
this.observers.push({ condition, resolve })
})<|fim▁hole|> }
@exportDependency(ControllerMountSpy)
mountSpy: ControllerMountSpy = {
didMount: (controller: React.Component) => {
this.observers.forEach((observer) => {
if (observer.condition(controller)) {
observer.resolve()
}
})
}
}
}
return ControllerMountSpyPluginImpl
})<|fim▁end|> | |
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# Copyright (c) 2016 Lyft Inc.
#<|fim▁hole|>#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
setuptools.setup(
setup_requires=['pbr'],
pbr=True)<|fim▁end|> | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at |
<|file_name|>best_fitness.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Future
from __future__ import absolute_import, division, print_function, \
unicode_literals, with_statement
# Standard Library
from datetime import datetime
# Third Party
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D # Load 3d plots capabilities
# First Party
from metaopt.plugin.plugin import Plugin
NUMBER_OF_SAMPLES = 200
COLORMAP = cm.jet
REVERSED_COLORMAP = cm.jet_r
class VisualizeBestFitnessPlugin(Plugin):
"""Visualize optimization progess"""
def __init__(self):
self.best_fitnesses = []
self.timestamps = []
self.start_time = None
self.current_best = None
self.return_spec = None
def setup(self, f, param_spec, return_spec):
del f, param_spec
self.return_spec = return_spec
if not self.start_time:
self.start_time = datetime.now()
def on_result(self, invocation):
fitness = invocation.current_result
if self.current_best is None or fitness < self.current_best:
self.current_best = fitness
<|fim▁hole|>
def show_fitness_invocations_plot(self):
"""Show a fitness--invocations plot"""
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel("Number of Invocations")
ax.set_ylabel(self.get_y_label())
ax.plot(self.best_fitnesses)
plt.show()
def show_fitness_time_plot(self):
"""Show a fitness--time plot"""
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_xlabel("Time")
ax.set_ylabel(self.get_y_label())
ax.plot(self.timestamps, self.best_fitnesses)
plt.show()
def get_y_label(self):
return self.return_spec.return_values[0]["name"]<|fim▁end|> | self.best_fitnesses.append(self.current_best.raw_values)
time_delta = datetime.now() - self.start_time
self.timestamps.append(time_delta.total_seconds()) |
<|file_name|>util-sha1.js<|end_file_name|><|fim▁begin|>/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/* SHA-1 implementation in JavaScript (c) Chris Veness 2002-2014 / MIT Licence */
/* */
/* - see http://csrc.nist.gov/groups/ST/toolkit/secure_hashing.html */
/* http://csrc.nist.gov/groups/ST/toolkit/examples.html */
/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/* jshint node:true *//* global define, escape, unescape */
'use strict';
/**
* SHA-1 hash function reference implementation.
*
* @namespace
*/
var Sha1 = {};
/**
* Generates SHA-1 hash of string.
*
* @param {string} msg - (Unicode) string to be hashed.
* @returns {string} Hash of msg as hex character string.
*/
Sha1.hash = function(msg) {
// convert string to UTF-8, as SHA only deals with byte-streams
msg = msg.utf8Encode();
// constants [§4.2.1]
var K = [ 0x5a827999, 0x6ed9eba1, 0x8f1bbcdc, 0xca62c1d6 ];
// PREPROCESSING
msg += String.fromCharCode(0x80); // add trailing '1' bit (+ 0's padding) to string [§5.1.1]
// convert string msg into 512-bit/16-integer blocks arrays of ints [§5.2.1]
var l = msg.length/4 + 2; // length (in 32-bit integers) of msg + ‘1’ + appended length
var N = Math.ceil(l/16); // number of 16-integer-blocks required to hold 'l' ints
var M = new Array(N);
for (var i=0; i<N; i++) {
M[i] = new Array(16);
for (var j=0; j<16; j++) { // encode 4 chars per integer, big-endian encoding
M[i][j] = (msg.charCodeAt(i*64+j*4)<<24) | (msg.charCodeAt(i*64+j*4+1)<<16) |
(msg.charCodeAt(i*64+j*4+2)<<8) | (msg.charCodeAt(i*64+j*4+3));
} // note running off the end of msg is ok 'cos bitwise ops on NaN return 0
}
// add length (in bits) into final pair of 32-bit integers (big-endian) [§5.1.1]
// note: most significant word would be (len-1)*8 >>> 32, but since JS converts
// bitwise-op args to 32 bits, we need to simulate this by arithmetic operators
M[N-1][14] = ((msg.length-1)*8) / Math.pow(2, 32); M[N-1][14] = Math.floor(M[N-1][14]);
M[N-1][15] = ((msg.length-1)*8) & 0xffffffff;
// set initial hash value [§5.3.1]
var H0 = 0x67452301;
var H1 = 0xefcdab89;
var H2 = 0x98badcfe;
var H3 = 0x10325476;
var H4 = 0xc3d2e1f0;
// HASH COMPUTATION [§6.1.2]
var W = new Array(80); var a, b, c, d, e;
for (var i=0; i<N; i++) {
// 1 - prepare message schedule 'W'
for (var t=0; t<16; t++) W[t] = M[i][t];
for (var t=16; t<80; t++) W[t] = Sha1.ROTL(W[t-3] ^ W[t-8] ^ W[t-14] ^ W[t-16], 1);
// 2 - initialise five working variables a, b, c, d, e with previous hash value
a = H0; b = H1; c = H2; d = H3; e = H4;
// 3 - main loop
for (var t=0; t<80; t++) {
var s = Math.floor(t/20); // seq for blocks of 'f' functions and 'K' constants
var T = (Sha1.ROTL(a,5) + Sha1.f(s,b,c,d) + e + K[s] + W[t]) & 0xffffffff;
e = d;
d = c;
c = Sha1.ROTL(b, 30);
b = a;
a = T;
}
// 4 - compute the new intermediate hash value (note 'addition modulo 2^32')
H0 = (H0+a) & 0xffffffff;
H1 = (H1+b) & 0xffffffff;
H2 = (H2+c) & 0xffffffff;
H3 = (H3+d) & 0xffffffff;
H4 = (H4+e) & 0xffffffff;
}
return Sha1.toHexStr(H0) + Sha1.toHexStr(H1) + Sha1.toHexStr(H2) +
Sha1.toHexStr(H3) + Sha1.toHexStr(H4);
};
/**
* Function 'f' [§4.1.1].
* @private
*/
Sha1.f = function(s, x, y, z) {
switch (s) {
case 0: return (x & y) ^ (~x & z); // Ch()
case 1: return x ^ y ^ z; // Parity()
case 2: return (x & y) ^ (x & z) ^ (y & z); // Maj()
case 3: return x ^ y ^ z; // Parity()
}
};
/**
* Rotates left (circular left shift) value x by n positions [§3.2.5].
* @private
*/
Sha1.ROTL = function(x, n) {
return (x<<n) | (x>>>(32-n));
};
/**
* Hexadecimal representation of a number.
* @private
*/
Sha1.toHexStr = function(n) {
// note can't use toString(16) as it is implementation-dependant,
// and in IE returns signed numbers when used on full words
var s="", v;
for (var i=7; i>=0; i--) { v = (n>>>(i*4)) & 0xf; s += v.toString(16); }
return s;
};
/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
/** Extend String object with method to encode multi-byte string to utf8
* - monsur.hossa.in/2012/07/20/utf-8-in-javascript.html */
if (typeof String.prototype.utf8Encode == 'undefined') {
String.prototype.utf8Encode = function() {
return unescape( encodeURIComponent( this ) );
};
}
/** Extend String object with method to decode utf8 string to multi-byte */
if (typeof String.prototype.utf8Decode == 'undefined') {
String.prototype.utf8Decode = function() {
try {
return decodeURIComponent( escape( this ) );
} catch (e) {
return this; // invalid UTF-8? return as-is
}
};
}<|fim▁hole|>/* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
if (typeof module != 'undefined' && module.exports) module.exports = Sha1; // CommonJs export
if (typeof define == 'function' && define.amd) define([], function() { return Sha1; }); // AMD<|fim▁end|> | |
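A quick way to sanity-check a SHA-1 implementation like `Sha1.hash` in this row is to compare it against a known test vector (the NIST examples referenced in its header). A small check using Python's standard library:

```python
import hashlib

# NIST test vector: SHA-1("abc")
expected = "a9993e364706816aba3e25717850c26c9cd0d89d"
assert hashlib.sha1(b"abc").hexdigest() == expected

# Other implementations (e.g. Sha1.hash above) should agree with hashlib
# for the same UTF-8 encoded input, which is what utf8Encode() provides.
print(hashlib.sha1("pâté".encode("utf-8")).hexdigest())
```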
<|file_name|>OOMPpart_RESE_0805_X_O202_01.py<|end_file_name|><|fim▁begin|>import OOMP
newPart = OOMP.oompItem(9439)
newPart.addTag("oompType", "RESE")
newPart.addTag("oompSize", "0805")
newPart.addTag("oompColor", "X")
newPart.addTag("oompDesc", "O202")<|fim▁hole|>newPart.addTag("oompIndex", "01")
OOMP.parts.append(newPart)<|fim▁end|> | |
<|file_name|>OutFundTrans.java<|end_file_name|><|fim▁begin|>/* */ package com.hundsun.network.gates.houchao.biz.services.pojo;
/* */
/* */ import org.springframework.context.annotation.Scope;
/* */ import org.springframework.stereotype.Service;
/* */
/* */ @Service("outFundTrans")
/* */ @Scope("prototype")
/* */ public class OutFundTrans extends InOutFundTrans
/* */ {
/* */ protected boolean isTrans()
/* */ {
/* 26 */ return true;
/* */ }
/* */
/* */ protected boolean isOutFund()
/* */ {
/* 31 */ return true;
/* */ }
/* */
/* */ protected boolean isNeedRecordUncomeFund()
/* */ {
/* 39 */ return false;
/* */ }
/* */
/* */ protected boolean isInOutTrans()
/* */ {
/* 49 */ return true;
<|fim▁hole|>
/* Location: E:\__安装归档\linquan-20161112\deploy16\houchao\webroot\WEB-INF\classes\
* Qualified Name: com.hundsun.network.gates.houchao.biz.services.pojo.OutFundTrans
* JD-Core Version: 0.6.0
*/<|fim▁end|> | /* */ }
/* */ }
|
<|file_name|>LanguageSettings.py<|end_file_name|><|fim▁begin|># The contents of this file are subject to the BitTorrent Open Source License
# Version 1.1 (the License). You may not copy or use this file, in either
# source code or executable form, except in compliance with the License. You
# may obtain a copy of the License at http://www.bittorrent.com/license/.
#
# Software distributed under the License is distributed on an AS IS basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
# written by Matt Chisholm
import wx
from BTL.defer import ThreadedDeferred
from BTL.language import languages, language_names
from BTL.platform import app_name
from BitTorrent.platform import read_language_file, write_language_file
from BitTorrent.GUI_wx import SPACING, VSizer, gui_wrap, text_wrappable
error_color = wx.Colour(192,0,0)
class LanguageSettings(wx.Panel):
def __init__(self, parent, *a, **k):
wx.Panel.__init__(self, parent, *a, **k)
self.sizer = VSizer()
self.SetSizer(self.sizer)
if 'errback' in k:
self.errback = k.pop('errback')
else:
self.errback = self.set_language_failed
# widgets
self.box = wx.StaticBox(self, label="Translate %s into:" % app_name)
self.language_names = ["System default",] + [language_names[l] for l in languages]
languages.insert(0, '')
self.languages = languages
self.choice = wx.Choice(self, choices=self.language_names)
self.Bind(wx.EVT_CHOICE, self.set_language, self.choice)
restart = wx.StaticText(self, -1,
"You must restart %s for the\nlanguage "
"setting to take effect." % app_name)
self.bottom_error = wx.StaticText(self, -1, '')
self.bottom_error.SetForegroundColour(error_color)
# sizers
self.box_sizer = wx.StaticBoxSizer(self.box, wx.VERTICAL)
# set menu selection and warning item if necessary
self.valid = True
lang = read_language_file()
if lang is not None:
try:
i = self.languages.index(lang)
self.choice.SetSelection(i)
except ValueError, e:
self.top_error = wx.StaticText(self, -1,
"This version of %s does not \nsupport the language '%s'."%(app_name,lang),)
self.top_error.SetForegroundColour(error_color)
<|fim▁hole|> self.choice.SetSelection(len(self.languages))
self.valid = False
else:
self.choice.SetSelection(0)
# other sizers
self.box_sizer.Add(self.choice, flag=wx.GROW|wx.ALL, border=SPACING)
self.box_sizer.Add(restart, flag=wx.BOTTOM|wx.LEFT|wx.RIGHT, border=SPACING)
self.box_sizer.Add(self.bottom_error, flag=wx.BOTTOM|wx.LEFT|wx.RIGHT, border=SPACING)
# clear out bottom error
self.clear_error()
self.sizer.AddFirst(self.box_sizer, flag=wx.GROW)
self.sizer.Fit(self)
def set_language(self, *a):
index = self.choice.GetSelection()
if index >= len(self.languages):
return
l = self.languages[index]
if not self.valid:
self.choice.Delete(len(self.languages))
self.choice.SetSelection(index)
self.valid = True
self.box_sizer.Detach(0)
self.top_error.Destroy()
self.box_sizer.Layout()
self.sizer.Layout()
d = ThreadedDeferred(gui_wrap, write_language_file, l)
d.addErrback(lambda e: self.set_language_failed(e, l))
d.addCallback(lambda r: self.language_was_set())
def language_was_set(self, *a):
self.clear_error()
wx.MessageBox("You must restart %s for the language "
"setting to take effect." % app_name,
"%s translation" % app_name,
style=wx.ICON_INFORMATION)
def clear_error(self):
index = self.box_sizer.GetItem(self.bottom_error)
if index:
self.box_sizer.Detach(self.bottom_error)
self.bottom_error.SetLabel('')
self.refit()
def set_error(self, errstr):
index = self.box_sizer.GetItem(self.bottom_error)
if not index:
self.box_sizer.Add(self.bottom_error, flag=wx.BOTTOM|wx.LEFT|wx.RIGHT, border=SPACING)
self.bottom_error.SetLabel(errstr)
if text_wrappable: self.bottom_error.Wrap(250)
self.refit()
def set_language_failed(self, e, l):
errstr = 'Could not find translation for language "%s"' % l
wx.the_app.logger.error(errstr, exc_info=e)
errstr = errstr + '\n%s: %s' % (str(e[0]), unicode(e[1].args[0]))
self.set_error(errstr)
def refit(self):
self.box_sizer.Layout()
self.sizer.Layout()
#self.sizer.Fit(self)
self.GetParent().Fit()<|fim▁end|> | self.box_sizer.Add(self.top_error, flag=wx.TOP|wx.LEFT|wx.RIGHT, border=SPACING)
# BUG add menu separator
# BUG change color of extra menu item
self.choice.Append(lang) |
<|file_name|>DotNet.java<|end_file_name|><|fim▁begin|>import java.security.Security;
import java.util.Base64;
import org.bouncycastle.crypto.BlockCipher;
import org.bouncycastle.crypto.BufferedBlockCipher;
import org.bouncycastle.crypto.engines.RijndaelEngine;
import org.bouncycastle.crypto.modes.CBCBlockCipher;
import org.bouncycastle.crypto.paddings.PKCS7Padding;
import org.bouncycastle.crypto.paddings.PaddedBufferedBlockCipher;
import org.bouncycastle.crypto.params.KeyParameter;
import org.bouncycastle.crypto.params.ParametersWithIV;
<|fim▁hole|> (byte)185,
(byte)186,
(byte)161,
(byte)188,
(byte)43,
(byte)253,
(byte)224,
(byte)76,
(byte)24,
(byte)133,
(byte)9,
(byte)201,
(byte)173,
(byte)255,
(byte)152,
(byte)113,
(byte)171,
(byte)225,
(byte)163,
(byte)121,
(byte)177,
(byte)211,
(byte)18,
(byte)50,
(byte)50,
(byte)219,
(byte)190,
(byte)168,
(byte)138,
(byte)97,
(byte)197
};
static byte[] initVector = new byte[] {
(byte) 8,
(byte) 173,
(byte) 47,
(byte) 130,
(byte) 199,
(byte) 242,
(byte) 20,
(byte) 211,
(byte) 63,
(byte) 47,
(byte) 254,
(byte) 173,
(byte) 163,
(byte) 245,
(byte) 242,
(byte) 232,
(byte) 11,
(byte) 244,
(byte) 134,
(byte) 249,
(byte) 44,
(byte) 123,
(byte) 138,
(byte) 109,
(byte) 155,
(byte) 173,
(byte) 122,
(byte) 76,
(byte) 93,
(byte) 125,
(byte) 185,
(byte) 66
};
public static String decrypt(byte[] key, byte[] initVector, byte[] encrypted) {
try {
BlockCipher engine = new RijndaelEngine(256);
CBCBlockCipher cbc = new CBCBlockCipher(engine);
BufferedBlockCipher cipher = new PaddedBufferedBlockCipher(cbc, new PKCS7Padding());
cipher.init(false, new ParametersWithIV(new KeyParameter(key), initVector));
int minSize = cipher.getOutputSize(encrypted.length);
byte[] outBuf = new byte[minSize];
int length1 = cipher.processBytes(encrypted, 0, encrypted.length, outBuf, 0);
int length2 = cipher.doFinal(outBuf, length1);
int actualLength = length1 + length2;
byte[] result = new byte[actualLength];
System.arraycopy(outBuf, 0, result, 0, result.length);
return new String(result);
} catch (Exception ex) {
ex.printStackTrace();
}
return null;
}
public static void main(String[] args) {
Security.addProvider(new org.bouncycastle.jce.provider.BouncyCastleProvider());
String sha64 = "SxTgWtrMjXY/dA50Kk20PkNeNLQ=";
byte[] k = Base64.getDecoder().decode(sha64);
System.out.println("Buffer :: "+Base64.getEncoder().encodeToString(buffer)+" --> length "+buffer.length);
System.out.println("Key(Sha) :: "+Base64.getEncoder().encodeToString(k)+" --> length "+k.length);
System.out.println("IV :: "+Base64.getEncoder().encodeToString(initVector)+" --> length "+initVector.length);
System.out.println(decrypt(k, initVector, buffer));
}
}<|fim▁end|> | public class DotNet {
static byte[] buffer = new byte[] {
(byte)17, |
<|file_name|>extension.py<|end_file_name|><|fim▁begin|># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Composer Extension
Downloads, installs and runs Composer.
"""
import os
import os.path
import sys
import logging
import re
import json
import StringIO
from build_pack_utils import utils
from build_pack_utils import stream_output
from extension_helpers import ExtensionHelper
from build_pack_utils.compile_extensions import CompileExtensions
_log = logging.getLogger('composer')
def find_composer_paths(ctx):
build_dir = ctx['BUILD_DIR']
webdir = ctx['WEBDIR']
json_path = None
lock_path = None
json_paths = [
os.path.join(build_dir, 'composer.json'),
os.path.join(build_dir, webdir, 'composer.json')
]
lock_paths = [
os.path.join(build_dir, 'composer.lock'),
os.path.join(build_dir, webdir, 'composer.lock')
]
env_path = os.getenv('COMPOSER_PATH')
if env_path is not None:
json_paths = json_paths + [
os.path.join(build_dir, env_path, 'composer.json'),
os.path.join(build_dir, webdir, env_path, 'composer.json')
]
lock_paths = lock_paths + [
os.path.join(build_dir, env_path, 'composer.lock'),
os.path.join(build_dir, webdir, env_path, 'composer.lock')
]
for path in json_paths:
if os.path.exists(path):
json_path = path
for path in lock_paths:
if os.path.exists(path):
lock_path = path
return (json_path, lock_path)
class ComposerConfiguration(object):
def __init__(self, ctx):
self._ctx = ctx
self._log = _log
self._init_composer_paths()
def _init_composer_paths(self):
(self.json_path, self.lock_path) = \
find_composer_paths(self._ctx)
def read_exts_from_path(self, path):
exts = []
if path:
req_pat = re.compile(r'"require"\s?\:\s?\{(.*?)\}', re.DOTALL)
ext_pat = re.compile(r'"ext-(.*?)"')
with open(path, 'rt') as fp:
data = fp.read()
for req_match in req_pat.finditer(data):
for ext_match in ext_pat.finditer(req_match.group(1)):
exts.append(ext_match.group(1))
return exts
def pick_php_version(self, requested):
selected = None
if requested is None:
selected = self._ctx['PHP_VERSION']
elif requested == '5.5.*' or requested == '>=5.5':
selected = self._ctx['PHP_55_LATEST']
elif requested == '5.6.*' or requested == '>=5.6':
selected = self._ctx['PHP_56_LATEST']
elif requested == '7.0.*' or requested == '>=7.0':
selected = self._ctx['PHP_70_LATEST']
elif requested.startswith('5.5.'):
selected = requested
elif requested.startswith('5.6.'):
selected = requested
elif requested.startswith('7.0.'):
selected = requested
else:
selected = self._ctx['PHP_VERSION']
return selected
def read_version_from_composer(self, key):
(json_path, lock_path) = find_composer_paths(self._ctx)
if json_path is not None:
composer = json.load(open(json_path, 'r'))
require = composer.get('require', {})
return require.get(key, None)
if lock_path is not None:
composer = json.load(open(lock_path, 'r'))
platform = composer.get('platform', {})
return platform.get(key, None)
return None
def configure(self):
if self.json_path or self.lock_path:
exts = []
# include any existing extensions
exts.extend(self._ctx.get('PHP_EXTENSIONS', []))
# add 'openssl' extension
exts.append('openssl')
# add platform extensions from composer.json & composer.lock
exts.extend(self.read_exts_from_path(self.json_path))
exts.extend(self.read_exts_from_path(self.lock_path))
# update context with new list of extensions,
# if composer.json exists
php_version = self.read_version_from_composer('php')
self._log.debug('Composer picked PHP Version [%s]',
php_version)
self._ctx['PHP_VERSION'] = self.pick_php_version(php_version)
self._ctx['PHP_EXTENSIONS'] = utils.unique(exts)
self._ctx['PHP_VM'] = 'php'
<|fim▁hole|>class ComposerExtension(ExtensionHelper):
def __init__(self, ctx):
ExtensionHelper.__init__(self, ctx)
self._log = _log
def _defaults(self):
manifest_file_path = os.path.join(self._ctx["BP_DIR"], "manifest.yml")
compile_ext = CompileExtensions(self._ctx["BP_DIR"])
_, default_version = compile_ext.default_version_for(manifest_file_path=manifest_file_path, dependency="composer")
return {
'COMPOSER_VERSION': default_version,
'COMPOSER_PACKAGE': 'composer.phar',
'COMPOSER_DOWNLOAD_URL': '/composer/'
'{COMPOSER_VERSION}/{COMPOSER_PACKAGE}',
'COMPOSER_INSTALL_OPTIONS': ['--no-interaction', '--no-dev'],
'COMPOSER_VENDOR_DIR': '{BUILD_DIR}/{LIBDIR}/vendor',
'COMPOSER_BIN_DIR': '{BUILD_DIR}/php/bin',
'COMPOSER_CACHE_DIR': '{CACHE_DIR}/composer'
}
def _should_compile(self):
(json_path, lock_path) = \
find_composer_paths(self._ctx)
return (json_path is not None or lock_path is not None)
def _compile(self, install):
self._builder = install.builder
self.composer_runner = ComposerCommandRunner(self._ctx, self._builder)
self.move_local_vendor_folder()
self.install()
self.run()
def move_local_vendor_folder(self):
vendor_path = os.path.join(self._ctx['BUILD_DIR'],
self._ctx['WEBDIR'],
'vendor')
if os.path.exists(vendor_path):
self._log.debug("Vendor [%s] exists, moving to LIBDIR",
vendor_path)
(self._builder.move()
.under('{BUILD_DIR}/{WEBDIR}')
.into('{BUILD_DIR}/{LIBDIR}')
.where_name_matches('^%s/.*$' % vendor_path)
.done())
def install(self):
self._builder.install().package('PHP').done()
if self._ctx['COMPOSER_VERSION'] == 'latest':
dependencies_path = os.path.join(self._ctx['BP_DIR'],
'dependencies')
if os.path.exists(dependencies_path):
raise RuntimeError('"COMPOSER_VERSION": "latest" ' \
'is not supported in the cached buildpack. Please vendor your preferred version of composer with your app, or use the provided default composer version.')
self._ctx['COMPOSER_DOWNLOAD_URL'] = \
'https://getcomposer.org/composer.phar'
self._builder.install()._installer.install_binary_direct(
self._ctx['COMPOSER_DOWNLOAD_URL'], None,
os.path.join(self._ctx['BUILD_DIR'], 'php', 'bin'),
extract=False)
else:
self._builder.install()._installer._install_binary_from_manifest(
self._ctx['COMPOSER_DOWNLOAD_URL'],
os.path.join(self._ctx['BUILD_DIR'], 'php', 'bin'),
extract=False)
def _github_oauth_token_is_valid(self, candidate_oauth_token):
stringio_writer = StringIO.StringIO()
curl_command = 'curl -H "Authorization: token %s" ' \
'https://api.github.com/rate_limit' % candidate_oauth_token
stream_output(stringio_writer,
curl_command,
env=os.environ,
cwd=self._ctx['BUILD_DIR'],
shell=True)
github_response = stringio_writer.getvalue()
github_response_json = json.loads(github_response)
return 'resources' in github_response_json
def _github_rate_exceeded(self, token_is_valid):
stringio_writer = StringIO.StringIO()
if token_is_valid:
candidate_oauth_token = os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN')
curl_command = 'curl -H "Authorization: token %s" ' \
'https://api.github.com/rate_limit' % candidate_oauth_token
else:
curl_command = 'curl https://api.github.com/rate_limit'
stream_output(stringio_writer,
curl_command,
env=os.environ,
cwd=self._ctx['BUILD_DIR'],
shell=True)
github_response = stringio_writer.getvalue()
github_response_json = json.loads(github_response)
rate = github_response_json['rate']
num_remaining = rate['remaining']
return num_remaining <= 0
def setup_composer_github_token(self):
github_oauth_token = os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN')
if self._github_oauth_token_is_valid(github_oauth_token):
print('-----> Using custom GitHub OAuth token in'
' $COMPOSER_GITHUB_OAUTH_TOKEN')
self.composer_runner.run('config', '-g',
'github-oauth.github.com',
'"%s"' % github_oauth_token)
return True
else:
print('-----> The GitHub OAuth token supplied from '
'$COMPOSER_GITHUB_OAUTH_TOKEN is invalid')
return False
def check_github_rate_exceeded(self, token_is_valid):
if self._github_rate_exceeded(token_is_valid):
print('-----> The GitHub api rate limit has been exceeded. '
'Composer will continue by downloading from source, which might result in slower downloads. '
'You can increase your rate limit with a GitHub OAuth token. '
'Please obtain a GitHub OAuth token by registering your application at '
'https://github.com/settings/applications/new. '
'Then set COMPOSER_GITHUB_OAUTH_TOKEN in your environment to the value of this token.')
def run(self):
# Move composer files into root directory
(json_path, lock_path) = find_composer_paths(self._ctx)
if json_path is not None and os.path.dirname(json_path) != self._ctx['BUILD_DIR']:
(self._builder.move()
.under(os.path.dirname(json_path))
.where_name_is('composer.json')
.into('BUILD_DIR')
.done())
if lock_path is not None and os.path.dirname(lock_path) != self._ctx['BUILD_DIR']:
(self._builder.move()
.under(os.path.dirname(lock_path))
.where_name_is('composer.lock')
.into('BUILD_DIR')
.done())
# Sanity Checks
if not os.path.exists(os.path.join(self._ctx['BUILD_DIR'],
'composer.lock')):
msg = (
'PROTIP: Include a `composer.lock` file with your '
'application! This will make sure the exact same version '
'of dependencies are used when you deploy to CloudFoundry.')
self._log.warning(msg)
print msg
# dump composer version, if in debug mode
if self._ctx.get('BP_DEBUG', False):
self.composer_runner.run('-V')
if not os.path.exists(os.path.join(self._ctx['BP_DIR'], 'dependencies')):
token_is_valid = False
# config composer to use github token, if provided
if os.getenv('COMPOSER_GITHUB_OAUTH_TOKEN', False):
token_is_valid = self.setup_composer_github_token()
# check that the api rate limit has not been exceeded, otherwise exit
self.check_github_rate_exceeded(token_is_valid)
# install dependencies w/Composer
self.composer_runner.run('install', '--no-progress',
*self._ctx['COMPOSER_INSTALL_OPTIONS'])
class ComposerCommandRunner(object):
def __init__(self, ctx, builder):
self._log = _log
self._ctx = ctx
self._strategy = PHPComposerStrategy(ctx)
self._php_path = self._strategy.binary_path()
self._composer_path = os.path.join(ctx['BUILD_DIR'], 'php',
'bin', 'composer.phar')
self._strategy.write_config(builder)
def _build_composer_environment(self):
env = {}
for key in os.environ.keys():
val = self._ctx.get(key, '')
env[key] = val if type(val) == str else json.dumps(val)
# add basic composer vars
env['COMPOSER_VENDOR_DIR'] = self._ctx['COMPOSER_VENDOR_DIR']
env['COMPOSER_BIN_DIR'] = self._ctx['COMPOSER_BIN_DIR']
env['COMPOSER_CACHE_DIR'] = self._ctx['COMPOSER_CACHE_DIR']
# prevent key system variables from being overridden
env['LD_LIBRARY_PATH'] = self._strategy.ld_library_path()
env['PHPRC'] = self._ctx['TMPDIR']
env['PATH'] = ':'.join(filter(None,
[env.get('PATH', ''),
os.path.dirname(self._php_path)]))
self._log.debug("ENV IS: %s",
'\n'.join(["%s=%s (%s)" % (key, val, type(val))
for (key, val) in env.iteritems()]))
return env
def run(self, *args):
try:
cmd = [self._php_path, self._composer_path]
cmd.extend(args)
self._log.debug("Running command [%s]", ' '.join(cmd))
stream_output(sys.stdout,
' '.join(cmd),
env=self._build_composer_environment(),
cwd=self._ctx['BUILD_DIR'],
shell=True)
except:
print "-----> Composer command failed"
raise
class PHPComposerStrategy(object):
def __init__(self, ctx):
self._ctx = ctx
def binary_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'php', 'bin', 'php')
def write_config(self, builder):
# rewrite a temp copy of php.ini for use by composer
(builder.copy()
.under('{BUILD_DIR}/php/etc')
.where_name_is('php.ini')
.into('TMPDIR')
.done())
utils.rewrite_cfgs(os.path.join(self._ctx['TMPDIR'], 'php.ini'),
{'TMPDIR': self._ctx['TMPDIR'],
'HOME': self._ctx['BUILD_DIR']},
delim='@')
def ld_library_path(self):
return os.path.join(
self._ctx['BUILD_DIR'], 'php', 'lib')
# Extension Methods
def configure(ctx):
config = ComposerConfiguration(ctx)
config.configure()
def preprocess_commands(ctx):
composer = ComposerExtension(ctx)
return composer.preprocess_commands()
def service_commands(ctx):
composer = ComposerExtension(ctx)
return composer.service_commands()
def service_environment(ctx):
composer = ComposerExtension(ctx)
return composer.service_environment()
def compile(install):
composer = ComposerExtension(install.builder._ctx)
return composer.compile(install)<|fim▁end|> | |
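`ComposerConfiguration.read_exts_from_path` in this row extracts required PHP extensions with two regular expressions: one for the `"require": {...}` block and one for `"ext-..."` keys. A standalone sketch of that extraction, run against an inline example file (the input is illustrative, not taken from the buildpack):

```python
import re

COMPOSER_JSON = '''{
    "require": {
        "php": ">=5.6",
        "ext-gd": "*",
        "ext-mbstring": "*"
    }
}'''

# Same two-pass extraction as read_exts_from_path above.
req_pat = re.compile(r'"require"\s?\:\s?\{(.*?)\}', re.DOTALL)
ext_pat = re.compile(r'"ext-(.*?)"')

exts = []
for req_match in req_pat.finditer(COMPOSER_JSON):
    for ext_match in ext_pat.finditer(req_match.group(1)):
        exts.append(ext_match.group(1))

print(exts)  # ['gd', 'mbstring']
```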
<|file_name|>parserGenericsInVariableDeclaration1.ts<|end_file_name|><|fim▁begin|>var v : Foo<T> = 1;
var v : Foo<T>= 1;<|fim▁hole|>
var v : Foo<Bar<T>> = 1;
var v : Foo<Bar<T>>= 1;
var v : Foo<Bar<Quux<T>>> = 1;
var v : Foo<Bar<Quux<T>>>= 1;<|fim▁end|> | |
<|file_name|>web.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
pygments.lexers.web
~~~~~~~~~~~~~~~~~~~
Lexers for web-related languages and markup.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
try:
set
except NameError:
from sets import Set as set
from pygments.lexer import RegexLexer, bygroups, using, include, this
from pygments.token import \
Text, Comment, Operator, Keyword, Name, String, Number, Other, Punctuation
from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \
html_doctype_matches
__all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'CssLexer',
'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer',
'MxmlLexer']
class JavascriptLexer(RegexLexer):
"""
For JavaScript source code.
"""
name = 'JavaScript'
aliases = ['js', 'javascript']
filenames = ['*.js']
mimetypes = ['application/x-javascript', 'text/x-javascript', 'text/javascript']
flags = re.DOTALL
tokens = {
'commentsandwhitespace': [
(r'\s+', Text),
(r'<!--', Comment),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline)
],
'slashstartsregex': [
include('commentsandwhitespace'),
(r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
r'([gim]+\b|\B)', String.Regex, '#pop'),
(r'(?=/)', Text, ('#pop', 'badregex')),
(r'', Text, '#pop')
],
'badregex': [
('\n', Text, '#pop')
],
'root': [
(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
include('commentsandwhitespace'),
(r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'),
(r'[{(\[;,]', Punctuation, 'slashstartsregex'),
(r'[})\].]', Punctuation),
(r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
r'this)\b', Keyword, 'slashstartsregex'),
(r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
(r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
r'extends|final|float|goto|implements|import|int|interface|long|native|'
r'package|private|protected|public|short|static|super|synchronized|throws|'
r'transient|volatile)\b', Keyword.Reserved),
(r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
(r'(Array|Boolean|Date|Error|Function|Math|netscape|'
r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
r'decodeURIComponent|encodeURI|encodeURIComponent|'
r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
r'window)\b', Name.Builtin),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-fA-F]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
class ActionScriptLexer(RegexLexer):
"""<|fim▁hole|>
*New in Pygments 0.9.*
"""
name = 'ActionScript'
aliases = ['as', 'actionscript']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
flags = re.DOTALL
tokens = {
'root': [
(r'\s+', Text),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex),
(r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator),
(r'[{}\[\]();.]+', Punctuation),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|'
r'switch)\b', Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b',
Keyword.Constant),
(r'(Accessibility|AccessibilityProperties|ActionScriptVersion|'
r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|'
r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|'
r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|'
r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|'
r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|'
r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|'
r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|'
r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|'
r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|'
r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|'
r'EventPhase|ExternalInterface|FileFilter|FileReference|'
r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|'
r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|'
r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|'
             r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput|'
r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|'
r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|'
r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|'
r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|'
r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|'
r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|'
r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|'
r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|'
r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|'
r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|'
r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|'
r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|'
r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|'
r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|'
r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|'
r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|'
r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|'
r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|'
r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|'
r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|'
r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|'
r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b',
Name.Builtin),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b',Name.Function),
(r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
]
}
def analyse_text(text):
return 0.05
class ActionScript3Lexer(RegexLexer):
"""
For ActionScript 3 source code.
*New in Pygments 0.11.*
"""
name = 'ActionScript 3'
aliases = ['as3', 'actionscript3']
filenames = ['*.as']
mimetypes = ['application/x-actionscript', 'text/x-actionscript',
'text/actionscript']
identifier = r'[$a-zA-Z_][a-zA-Z0-9_]*'
flags = re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'\s+', Text),
(r'(function\s+)(' + identifier + r')(\s*)(\()',
bygroups(Keyword.Declaration, Name.Function, Text, Operator),
'funcparams'),
(r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' + identifier + r')',
bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text,
Keyword.Type)),
(r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)',
bygroups(Keyword, Text, Name.Namespace, Text)),
(r'(new)(\s+)(' + identifier + r')(\s*)(\()',
bygroups(Keyword, Text, Keyword.Type, Text, Operator)),
(r'//.*?\n', Comment.Single),
(r'/\*.*?\*/', Comment.Multiline),
(r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex),
(r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)),
(r'(case|default|for|each|in|while|do|break|return|continue|if|else|'
r'throw|try|catch|with|new|typeof|arguments|instanceof|this|'
r'switch|import|include|as|is)\b',
Keyword),
(r'(class|public|final|internal|native|override|private|protected|'
r'static|import|extends|implements|interface|intrinsic|return|super|'
r'dynamic|function|const|get|namespace|package|set)\b',
Keyword.Declaration),
(r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b',
Keyword.Constant),
(r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|'
r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|'
r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|'
r'unescape)\b', Name.Function),
(identifier, Name),
(r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'0x[0-9a-f]+', Number.Hex),
(r'[0-9]+', Number.Integer),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]();.-]+', Operator),
],
'funcparams': [
(r'\s+', Text),
(r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' +
identifier + r'|\*)(\s*)',
bygroups(Text, Punctuation, Name, Text, Operator, Text,
Keyword.Type, Text), 'defval'),
(r'\)', Operator, 'type')
],
'type': [
(r'(\s*)(:)(\s*)(' + identifier + r'|\*)',
bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'),
(r'\s*', Text, '#pop:2')
],
'defval': [
(r'(=)(\s*)([^(),]+)(\s*)(,?)',
bygroups(Operator, Text, using(this), Text, Operator), '#pop'),
(r',?', Operator, '#pop')
]
}
def analyse_text(text):
if re.match(r'\w+\s*:\s*\w', text): return 0.3
return 0.1
class CssLexer(RegexLexer):
"""
For CSS (Cascading Style Sheets).
"""
name = 'CSS'
aliases = ['css']
filenames = ['*.css']
mimetypes = ['text/css']
tokens = {
'root': [
include('basics'),
],
'basics': [
(r'\s+', Text),
(r'/\*(?:.|\n)*?\*/', Comment),
(r'{', Punctuation, 'content'),
(r'\:[a-zA-Z0-9_-]+', Name.Decorator),
(r'\.[a-zA-Z0-9_-]+', Name.Class),
(r'\#[a-zA-Z0-9_-]+', Name.Function),
(r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'),
(r'[a-zA-Z0-9_-]+', Name.Tag),
(r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single)
],
'atrule': [
(r'{', Punctuation, 'atcontent'),
(r';', Punctuation, '#pop'),
include('basics'),
],
'atcontent': [
include('basics'),
(r'}', Punctuation, '#pop:2'),
],
'content': [
(r'\s+', Text),
(r'}', Punctuation, '#pop'),
(r'url\(.*?\)', String.Other),
(r'^@.*?$', Comment.Preproc),
(r'(azimuth|background-attachment|background-color|'
r'background-image|background-position|background-repeat|'
r'background|border-bottom-color|border-bottom-style|'
r'border-bottom-width|border-left-color|border-left-style|'
r'border-left-width|border-right|border-right-color|'
r'border-right-style|border-right-width|border-top-color|'
r'border-top-style|border-top-width|border-bottom|'
r'border-collapse|border-left|border-width|border-color|'
r'border-spacing|border-style|border-top|border|caption-side|'
r'clear|clip|color|content|counter-increment|counter-reset|'
r'cue-after|cue-before|cue|cursor|direction|display|'
r'elevation|empty-cells|float|font-family|font-size|'
r'font-size-adjust|font-stretch|font-style|font-variant|'
r'font-weight|font|height|letter-spacing|line-height|'
r'list-style-type|list-style-image|list-style-position|'
r'list-style|margin-bottom|margin-left|margin-right|'
r'margin-top|margin|marker-offset|marks|max-height|max-width|'
r'min-height|min-width|opacity|orphans|outline|outline-color|'
r'outline-style|outline-width|overflow|padding-bottom|'
r'padding-left|padding-right|padding-top|padding|page|'
r'page-break-after|page-break-before|page-break-inside|'
r'pause-after|pause-before|pause|pitch|pitch-range|'
r'play-during|position|quotes|richness|right|size|'
r'speak-header|speak-numeral|speak-punctuation|speak|'
r'speech-rate|stress|table-layout|text-align|text-decoration|'
r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
r'vertical-align|visibility|voice-family|volume|white-space|'
r'widows|width|word-spacing|z-index|bottom|left|'
r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
r'behind|below|bidi-override|blink|block|bold|bolder|both|'
r'capitalize|center-left|center-right|center|circle|'
r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
r'inherit|inline-table|inline|inset|inside|invert|italic|'
r'justify|katakana-iroha|katakana|landscape|larger|large|'
r'left-side|leftwards|level|lighter|line-through|list-item|'
r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
r'lower|low|medium|message-box|middle|mix|monospace|'
r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
r'open-quote|outset|outside|overline|pointer|portrait|px|'
r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
r'rightwards|s-resize|sans-serif|scroll|se-resize|'
r'semi-condensed|semi-expanded|separate|serif|show|silent|'
r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
r'spell-out|square|static|status-bar|super|sw-resize|'
r'table-caption|table-cell|table-column|table-column-group|'
r'table-footer-group|table-header-group|table-row|'
r'table-row-group|text|text-bottom|text-top|thick|thin|'
r'transparent|ultra-condensed|ultra-expanded|underline|'
r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
(r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
r'blueviolet|peachpuff)\b', Name.Builtin),
(r'\!important', Comment.Preproc),
(r'/\*(?:.|\n)*?\*/', Comment),
(r'\#[a-zA-Z0-9]{1,6}', Number),
(r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex)', Number),
(r'-?[0-9]+', Number),
(r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
(r'[\[\]();]+', Punctuation),
(r'"(\\\\|\\"|[^"])*"', String.Double),
(r"'(\\\\|\\'|[^'])*'", String.Single),
(r'[a-zA-Z][a-zA-Z0-9]+', Name)
]
}
class HtmlLexer(RegexLexer):
"""
    For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS are highlighted
by the appropriate lexer.
"""
name = 'HTML'
aliases = ['html']
filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
mimetypes = ['text/html', 'application/xhtml+xml']
flags = re.IGNORECASE | re.DOTALL
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
(r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
(r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
(r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
(r'[a-zA-Z0-9_:-]+', Name.Attribute),
(r'/?\s*>', Name.Tag, '#pop'),
],
'script-content': [
(r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
(r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
],
'style-content': [
(r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
(r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
],
'attr': [
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
def analyse_text(text):
if html_doctype_matches(text):
return 0.5
class PhpLexer(RegexLexer):
"""
For `PHP <http://www.php.net/>`_ source code.
For PHP embedded in HTML, use the `HtmlPhpLexer`.
Additional options accepted:
`startinline`
If given and ``True`` the lexer starts highlighting with
php code (i.e.: no starting ``<?php`` required). The default
is ``False``.
`funcnamehighlighting`
If given and ``True``, highlight builtin function names
(default: ``True``).
`disabledmodules`
If given, must be a list of module names whose function names
should not be highlighted. By default all modules are highlighted
except the special ``'unknown'`` module that includes functions
that are known to php but are undocumented.
To get a list of allowed modules have a look into the
`_phpbuiltins` module:
.. sourcecode:: pycon
>>> from pygments.lexers._phpbuiltins import MODULES
>>> MODULES.keys()
['PHP Options/Info', 'Zip', 'dba', ...]
In fact the names of those modules match the module names from
the php documentation.
"""
name = 'PHP'
aliases = ['php', 'php3', 'php4', 'php5']
filenames = ['*.php', '*.php[345]']
mimetypes = ['text/x-php']
flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
tokens = {
'root': [
(r'<\?(php)?', Comment.Preproc, 'php'),
(r'[^<]+', Other),
(r'<', Other)
],
'php': [
(r'\?>', Comment.Preproc, '#pop'),
(r'<<<([a-zA-Z_][a-zA-Z0-9_]*)\n.*?\n\1\;?\n', String),
(r'\s+', Text),
(r'#.*?\n', Comment.Single),
(r'//.*?\n', Comment.Single),
# put the empty comment here, it is otherwise seen as
# the start of a docstring
(r'/\*\*/', Comment.Multiline),
(r'/\*\*.*?\*/', String.Doc),
(r'/\*.*?\*/', Comment.Multiline),
(r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Operator, Text, Name.Attribute)),
(r'[~!%^&*+=|:.<>/?@-]+', Operator),
(r'[\[\]{}();,]+', Punctuation),
(r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
(r'(function)(\s+)(&?)(\s*)',
bygroups(Keyword, Text, Operator, Text), 'functionname'),
(r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
bygroups(Keyword, Text, Name.Constant)),
(r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
r'FALSE|print|for|require|continue|foreach|require_once|'
r'declare|return|default|static|do|switch|die|stdClass|'
r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
r'implements|public|private|protected|abstract|clone|try|'
r'catch|throw|this)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
(r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
(r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
('[a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
(r"[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|"
r"0[xX][0-9a-fA-F]+[Ll]?", Number),
(r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
(r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
(r'"', String.Double, 'string'),
],
'classname': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Class, '#pop')
],
'functionname': [
(r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
],
'string': [
(r'"', String.Double, '#pop'),
(r'[^{$"\\]+', String.Double),
(r'\\([nrt\"$]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
(r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
String.Interpol),
(r'(\{\$\{)(.*?)(\}\})',
bygroups(String.Interpol, using(this, _startinline=True),
String.Interpol)),
(r'(\{)(\$.*?)(\})',
bygroups(String.Interpol, using(this, _startinline=True),
String.Interpol)),
(r'(\$\{)(\S+)(\})',
bygroups(String.Interpol, Name.Variable, String.Interpol)),
(r'[${\\]+', String.Double)
],
}
def __init__(self, **options):
self.funcnamehighlighting = get_bool_opt(
options, 'funcnamehighlighting', True)
self.disabledmodules = get_list_opt(
options, 'disabledmodules', ['unknown'])
self.startinline = get_bool_opt(options, 'startinline', False)
# private option argument for the lexer itself
if '_startinline' in options:
self.startinline = options.pop('_startinline')
# collect activated functions in a set
self._functions = set()
if self.funcnamehighlighting:
from pygments.lexers._phpbuiltins import MODULES
for key, value in MODULES.iteritems():
if key not in self.disabledmodules:
self._functions.update(value)
RegexLexer.__init__(self, **options)
def get_tokens_unprocessed(self, text):
stack = ['root']
if self.startinline:
stack.append('php')
for index, token, value in \
RegexLexer.get_tokens_unprocessed(self, text, stack):
if token is Name.Other:
if value in self._functions:
yield index, Name.Builtin, value
continue
yield index, token, value
def analyse_text(text):
rv = 0.0
if re.search(r'<\?(?!xml)', text):
rv += 0.3
if '?>' in text:
rv += 0.1
return rv
class XmlLexer(RegexLexer):
"""
Generic lexer for XML (eXtensible Markup Language).
"""
flags = re.MULTILINE | re.DOTALL
name = 'XML'
aliases = ['xml']
filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl']
mimetypes = ['text/xml', 'application/xml', 'image/svg+xml',
'application/rss+xml', 'application/atom+xml',
'application/xsl+xml', 'application/xslt+xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}
def analyse_text(text):
if looks_like_xml(text):
return 0.5
class XsltLexer(XmlLexer):
'''
A lexer for XSLT.
*New in Pygments 0.10.*
'''
name = 'XSLT'
aliases = ['xslt']
filenames = ['*.xsl', '*.xslt']
EXTRA_KEYWORDS = set([
'apply-imports', 'apply-templates', 'attribute',
'attribute-set', 'call-template', 'choose', 'comment',
'copy', 'copy-of', 'decimal-format', 'element', 'fallback',
'for-each', 'if', 'import', 'include', 'key', 'message',
'namespace-alias', 'number', 'otherwise', 'output', 'param',
'preserve-space', 'processing-instruction', 'sort',
'strip-space', 'stylesheet', 'template', 'text', 'transform',
'value-of', 'variable', 'when', 'with-param'
])
def get_tokens_unprocessed(self, text):
for index, token, value in XmlLexer.get_tokens_unprocessed(self, text):
m = re.match('</?xsl:([^>]*)/?>?', value)
if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS:
yield index, Keyword, value
else:
yield index, token, value
def analyse_text(text):
if looks_like_xml(text) and '<xsl' in text:
return 0.8
class MxmlLexer(RegexLexer):
"""
For MXML markup.
Nested AS3 in <script> tags is highlighted by the appropriate lexer.
"""
flags = re.MULTILINE | re.DOTALL
name = 'MXML'
aliases = ['mxml']
filenames = ['*.mxml']
    mimetypes = ['text/xml', 'application/xml']
tokens = {
'root': [
('[^<&]+', Text),
(r'&\S*?;', Name.Entity),
(r'(\<\!\[CDATA\[)(.*?)(\]\]\>)',
bygroups(String, using(ActionScript3Lexer), String)),
('<!--', Comment, 'comment'),
(r'<\?.*?\?>', Comment.Preproc),
('<![^>]*>', Comment.Preproc),
(r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'),
(r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag),
],
'comment': [
('[^-]+', Comment),
('-->', Comment, '#pop'),
('-', Comment),
],
'tag': [
(r'\s+', Text),
(r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'),
(r'/?\s*>', Name.Tag, '#pop'),
],
'attr': [
('\s+', Text),
('".*?"', String, '#pop'),
("'.*?'", String, '#pop'),
(r'[^\s>]+', String, '#pop'),
],
}<|fim▁end|> | For ActionScript source code. |
<|file_name|>models.py<|end_file_name|><|fim▁begin|># ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2012 Jens Hoffmann (hoffmaje)<|fim▁hole|># ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from django.db import models
from django.contrib.auth.models import User<|fim▁end|> | |
<|file_name|>memory.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import absolute_import
from apscheduler.jobstores.base import BaseJobStore, JobLookupError, ConflictingIdError
from apscheduler.util import datetime_to_utc_timestamp
class MemoryJobStore(BaseJobStore):
"""
Stores jobs in an array in RAM. Provides no persistence support.
Plugin alias: ``memory``
"""
def __init__(self):
super(MemoryJobStore, self).__init__()
self._jobs = [] # list of (job, timestamp), sorted by next_run_time and job id (ascending)
self._jobs_index = {} # id -> (job, timestamp) lookup table
def lookup_job(self, job_id):
return self._jobs_index.get(job_id, (None, None))[0]
def get_due_jobs(self, now):
now_timestamp = datetime_to_utc_timestamp(now)
pending = []
for job, timestamp in self._jobs:
if timestamp is None or timestamp > now_timestamp:
break
pending.append(job)
return pending
def get_next_run_time(self):
return self._jobs[0][0].next_run_time if self._jobs else None
def get_all_jobs(self):
return [j[0] for j in self._jobs]
def add_job(self, job):
if job.id in self._jobs_index:
raise ConflictingIdError(job.id)
timestamp = datetime_to_utc_timestamp(job.next_run_time)
index = self._get_job_index(timestamp, job.id)
self._jobs.insert(index, (job, timestamp))
self._jobs_index[job.id] = (job, timestamp)
def update_job(self, job):
old_job, old_timestamp = self._jobs_index.get(job.id, (None, None))
if old_job is None:
raise JobLookupError(job.id)<|fim▁hole|>
# If the next run time has not changed, simply replace the job in its present index.
        # Otherwise, reinsert the job into the list to preserve the ordering.
old_index = self._get_job_index(old_timestamp, old_job.id)
new_timestamp = datetime_to_utc_timestamp(job.next_run_time)
if old_timestamp == new_timestamp:
self._jobs[old_index] = (job, new_timestamp)
else:
del self._jobs[old_index]
new_index = self._get_job_index(new_timestamp, job.id)
self._jobs.insert(new_index, (job, new_timestamp))
self._jobs_index[old_job.id] = (job, new_timestamp)
def remove_job(self, job_id):
job, timestamp = self._jobs_index.get(job_id, (None, None))
if job is None:
raise JobLookupError(job_id)
index = self._get_job_index(timestamp, job_id)
del self._jobs[index]
del self._jobs_index[job.id]
def remove_all_jobs(self):
self._jobs = []
self._jobs_index = {}
def shutdown(self):
self.remove_all_jobs()
def _get_job_index(self, timestamp, job_id):
"""
Returns the index of the given job, or if it's not found, the index where the job should be inserted based on
the given timestamp.
:type timestamp: int
:type job_id: str
"""
lo, hi = 0, len(self._jobs)
timestamp = float('inf') if timestamp is None else timestamp
while lo < hi:
mid = (lo + hi) // 2
mid_job, mid_timestamp = self._jobs[mid]
mid_timestamp = float('inf') if mid_timestamp is None else mid_timestamp
if mid_timestamp > timestamp:
hi = mid
elif mid_timestamp < timestamp:
lo = mid + 1
elif mid_job.id > job_id:
hi = mid
elif mid_job.id < job_id:
lo = mid + 1
else:
return mid
return lo<|fim▁end|> | |
<|file_name|>addFilter.js<|end_file_name|><|fim▁begin|>var LedgerRequestHandler = require('../../helpers/ledgerRequestHandler');
/**
* @api {post} /gl/:LEDGER_ID/add-filter add filter
* @apiGroup Ledger.Utils
* @apiVersion v1.0.0
*
* @apiDescription
* Add a filter for caching balances. This will speed up balance
 * requests that contain a matching filter.
*
* @apiParam {CounterpartyId[]} excludingCounterparties
* IDs of transaction counterparties to exclude with the filter
* @apiParam {AccountId[]} excludingContraAccounts
 *   IDs of transaction contra accounts to exclude with the filter
* @apiParam {CounterpartyId[]} [withCounterparties]
* IDs of transaction counterparties to limit with the filter. All others will be
* excluded.
*
* @apiParamExample {x-www-form-urlencoded} Request-Example:
* excludingCounterparties=foobar-llc,foobar-inc
* excludingContraAccounts=chase-saving,chase-checking
* withCounterparties=staples,ubs
*/
module.exports = new LedgerRequestHandler({
validateBody: {<|fim▁hole|> commitLedger: true
}).handle(function (options, cb) {
var excludingCounterparties = options.body.excludingCounterparties.split(',');
var excludingContraAccounts = options.body.excludingContraAccounts.split(',');
var withCounterparties = options.body.withCounterparties && options.body.withCounterparties.split(',');
var ledger = options.ledger;
ledger.registerFilter({
excludingCounterparties: excludingCounterparties,
excludingContraAccounts: excludingContraAccounts,
withCounterparties: withCounterparties
});
cb(null, ledger.toJson());
});<|fim▁end|> | 'excludingCounterparties': { type: 'string', required: true },
'excludingContraAccounts': { type: 'string', required: true },
'withCounterparties': { type: 'string' }
}, |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.contrib.sites.shortcuts import get_current_site
from django.core.management import call_command
from django.db import models, connections, transaction
from django.urls import reverse
from django_tenants.clone import CloneSchema
from .postgresql_backend.base import _check_schema_name
from .signals import post_schema_sync, schema_needs_to_be_sync
from .utils import get_creation_fakes_migrations, get_tenant_base_schema
from .utils import schema_exists, get_tenant_domain_model, get_public_schema_name, get_tenant_database_alias
class TenantMixin(models.Model):
"""
All tenant models must inherit this class.
"""
auto_drop_schema = False
"""
USE THIS WITH CAUTION!
Set this flag to true on a parent class if you want the schema to be
automatically deleted if the tenant row gets deleted.
"""
auto_create_schema = True
"""
Set this flag to false on a parent class if you don't want the schema
to be automatically created upon save.
"""
schema_name = models.CharField(max_length=63, unique=True, db_index=True,
validators=[_check_schema_name])
domain_url = None
"""
Leave this as None. Stores the current domain url so it can be used in the logs
"""
domain_subfolder = None
"""
    Leave this as None. Stores the subfolder if subfolder routing was used
"""
_previous_tenant = []
class Meta:
abstract = True
def __enter__(self):
"""
Syntax sugar which helps in celery tasks, cron jobs, and other scripts
Usage:
with Tenant.objects.get(schema_name='test') as tenant:
# run some code in tenant test
# run some code in previous tenant (public probably)
"""
connection = connections[get_tenant_database_alias()]
self._previous_tenant.append(connection.tenant)
self.activate()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
connection = connections[get_tenant_database_alias()]
connection.set_tenant(self._previous_tenant.pop())
def activate(self):
"""
        Syntax sugar that helps with fast tenant switching at the Django shell
Usage:
Tenant.objects.get(schema_name='test').activate()
"""
connection = connections[get_tenant_database_alias()]
connection.set_tenant(self)
@classmethod
def deactivate(cls):
"""
Syntax sugar, return to public schema
Usage:
test_tenant.deactivate()
# or simpler
Tenant.deactivate()
"""
connection = connections[get_tenant_database_alias()]
connection.set_schema_to_public()
def save(self, verbosity=1, *args, **kwargs):
connection = connections[get_tenant_database_alias()]
is_new = self.pk is None
has_schema = hasattr(connection, 'schema_name')
if has_schema and is_new and connection.schema_name != get_public_schema_name():
raise Exception("Can't create tenant outside the public schema. "
"Current schema is %s." % connection.schema_name)
elif has_schema and not is_new and connection.schema_name not in (self.schema_name, get_public_schema_name()):
raise Exception("Can't update tenant outside it's own schema or "
"the public schema. Current schema is %s."
% connection.schema_name)
super().save(*args, **kwargs)
if has_schema and is_new and self.auto_create_schema:
try:
self.create_schema(check_if_exists=True, verbosity=verbosity)
post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
except Exception:
# We failed creating the tenant, delete what we created and
# re-raise the exception
self.delete(force_drop=True)
raise
elif is_new:
# although we are not using the schema functions directly, the signal might be registered by a listener
schema_needs_to_be_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
elif not is_new and self.auto_create_schema and not schema_exists(self.schema_name):
# Create schemas for existing models, deleting only the schema on failure
try:
self.create_schema(check_if_exists=True, verbosity=verbosity)
post_schema_sync.send(sender=TenantMixin, tenant=self.serializable_fields())
except Exception:
# We failed creating the schema, delete what we created and
# re-raise the exception
self._drop_schema()
raise
def serializable_fields(self):
""" in certain cases the user model isn't serializable so you may want to only send the id """
return self
def _drop_schema(self, force_drop=False):
""" Drops the schema"""
connection = connections[get_tenant_database_alias()]
has_schema = hasattr(connection, 'schema_name')
if has_schema and connection.schema_name not in (self.schema_name, get_public_schema_name()):
raise Exception("Can't delete tenant outside it's own schema or "
"the public schema. Current schema is %s."
% connection.schema_name)
if has_schema and schema_exists(self.schema_name) and (self.auto_drop_schema or force_drop):
self.pre_drop()
cursor = connection.cursor()
cursor.execute('DROP SCHEMA "%s" CASCADE' % self.schema_name)
def pre_drop(self):
"""
        This is a routine which you could override to back up the tenant schema before dropping.
:return:
"""
def delete(self, force_drop=False, *args, **kwargs):
"""
Deletes this row. Drops the tenant's schema if the attribute
        auto_drop_schema is set to True.
"""
self._drop_schema(force_drop)
super().delete(*args, **kwargs)
def create_schema(self, check_if_exists=False, sync_schema=True,<|fim▁hole|> schema was created, false otherwise.
"""
# safety check
connection = connections[get_tenant_database_alias()]
_check_schema_name(self.schema_name)
cursor = connection.cursor()
if check_if_exists and schema_exists(self.schema_name):
return False
fake_migrations = get_creation_fakes_migrations()
if sync_schema:
if fake_migrations:
# copy tables and data from provided model schema
base_schema = get_tenant_base_schema()
clone_schema = CloneSchema()
clone_schema.clone_schema(base_schema, self.schema_name)
call_command('migrate_schemas',
tenant=True,
fake=True,
schema_name=self.schema_name,
interactive=False,
verbosity=verbosity)
else:
# create the schema
cursor.execute('CREATE SCHEMA "%s"' % self.schema_name)
call_command('migrate_schemas',
tenant=True,
schema_name=self.schema_name,
interactive=False,
verbosity=verbosity)
connection.set_schema_to_public()
def get_primary_domain(self):
"""
Returns the primary domain of the tenant
"""
try:
domain = self.domains.get(is_primary=True)
return domain
except get_tenant_domain_model().DoesNotExist:
return None
def reverse(self, request, view_name):
"""
Returns the URL of this tenant.
"""
http_type = 'https://' if request.is_secure() else 'http://'
domain = get_current_site(request).domain
url = ''.join((http_type, self.schema_name, '.', domain, reverse(view_name)))
return url
def get_tenant_type(self):
"""
Get the type of tenant. Will only work for multi type tenants
:return: str
"""
return getattr(self, settings.MULTI_TYPE_DATABASE_FIELD)
class DomainMixin(models.Model):
"""
All models that store the domains must inherit this class
"""
domain = models.CharField(max_length=253, unique=True, db_index=True)
tenant = models.ForeignKey(settings.TENANT_MODEL, db_index=True, related_name='domains',
on_delete=models.CASCADE)
# Set this to true if this is the primary domain
is_primary = models.BooleanField(default=True, db_index=True)
@transaction.atomic
def save(self, *args, **kwargs):
# Get all other primary domains with the same tenant
domain_list = self.__class__.objects.filter(tenant=self.tenant, is_primary=True).exclude(pk=self.pk)
# If we have no primary domain yet, set as primary domain by default
self.is_primary = self.is_primary or (not domain_list.exists())
if self.is_primary:
# Remove primary status of existing domains for tenant
domain_list.update(is_primary=False)
super().save(*args, **kwargs)
class Meta:
abstract = True<|fim▁end|> | verbosity=1):
"""
Creates the schema 'schema_name' for this tenant. Optionally checks if
the schema already exists before creating it. Returns true if the |
<|file_name|>presentation_availability_state.cc<|end_file_name|><|fim▁begin|>// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "third_party/blink/renderer/modules/presentation/presentation_availability_state.h"
#include "third_party/blink/renderer/modules/presentation/presentation_availability_observer.h"
#include "third_party/blink/renderer/platform/scheduler/public/thread.h"
namespace blink {
PresentationAvailabilityState::PresentationAvailabilityState(
mojom::blink::PresentationService* presentation_service)
: presentation_service_(presentation_service) {}
PresentationAvailabilityState::~PresentationAvailabilityState() = default;
void PresentationAvailabilityState::RequestAvailability(
const Vector<KURL>& urls,
PresentationAvailabilityCallbacks* callback) {
auto screen_availability = GetScreenAvailability(urls);
// Reject Promise if screen availability is unsupported for all URLs.
if (screen_availability == mojom::blink::ScreenAvailability::DISABLED) {
Thread::Current()->GetTaskRunner()->PostTask(
FROM_HERE,
WTF::Bind(
&PresentationAvailabilityCallbacks::RejectAvailabilityNotSupported,
WrapPersistent(callback)));
// Do not listen to urls if we reject the promise.
return;
}
auto* listener = GetAvailabilityListener(urls);
if (!listener) {
listener = MakeGarbageCollected<AvailabilityListener>(urls);
availability_listeners_.emplace_back(listener);
}
if (screen_availability != mojom::blink::ScreenAvailability::UNKNOWN) {
Thread::Current()->GetTaskRunner()->PostTask(
FROM_HERE, WTF::Bind(&PresentationAvailabilityCallbacks::Resolve,
WrapPersistent(callback),
screen_availability ==
mojom::blink::ScreenAvailability::AVAILABLE));
} else {
listener->availability_callbacks.push_back(callback);
}
for (const auto& availability_url : urls)
StartListeningToURL(availability_url);
}
void PresentationAvailabilityState::AddObserver(
PresentationAvailabilityObserver* observer) {
const auto& urls = observer->Urls();
auto* listener = GetAvailabilityListener(urls);
if (!listener) {
listener = MakeGarbageCollected<AvailabilityListener>(urls);
availability_listeners_.emplace_back(listener);
}
if (listener->availability_observers.Contains(observer))
return;
listener->availability_observers.push_back(observer);
for (const auto& availability_url : urls)
StartListeningToURL(availability_url);
}
void PresentationAvailabilityState::RemoveObserver(
PresentationAvailabilityObserver* observer) {
const auto& urls = observer->Urls();
auto* listener = GetAvailabilityListener(urls);
if (!listener) {
DLOG(WARNING) << "Stop listening for availability for unknown URLs.";
return;
}
wtf_size_t slot = listener->availability_observers.Find(observer);
if (slot != kNotFound) {
listener->availability_observers.EraseAt(slot);
}
for (const auto& availability_url : urls)
MaybeStopListeningToURL(availability_url);
TryRemoveAvailabilityListener(listener);
}
void PresentationAvailabilityState::UpdateAvailability(
const KURL& url,
mojom::blink::ScreenAvailability availability) {
auto* listening_status = GetListeningStatus(url);
if (!listening_status)
return;
if (listening_status->listening_state == ListeningState::kWaiting)
listening_status->listening_state = ListeningState::kActive;
if (listening_status->last_known_availability == availability)
return;
listening_status->last_known_availability = availability;
HeapVector<Member<AvailabilityListener>> listeners = availability_listeners_;
for (auto& listener : listeners) {
if (!listener->urls.Contains<KURL>(url))
continue;
auto screen_availability = GetScreenAvailability(listener->urls);
DCHECK(screen_availability != mojom::blink::ScreenAvailability::UNKNOWN);
HeapVector<Member<PresentationAvailabilityObserver>> observers =
listener->availability_observers;
for (auto& observer : observers) {
observer->AvailabilityChanged(screen_availability);
}<|fim▁hole|> callback_ptr->RejectAvailabilityNotSupported();
}
} else {
for (auto& callback_ptr : listener->availability_callbacks) {
callback_ptr->Resolve(screen_availability ==
mojom::blink::ScreenAvailability::AVAILABLE);
}
}
listener->availability_callbacks.clear();
for (const auto& availability_url : listener->urls)
MaybeStopListeningToURL(availability_url);
TryRemoveAvailabilityListener(listener);
}
}
void PresentationAvailabilityState::Trace(Visitor* visitor) const {
visitor->Trace(availability_listeners_);
}
void PresentationAvailabilityState::StartListeningToURL(const KURL& url) {
auto* listening_status = GetListeningStatus(url);
if (!listening_status) {
listening_status = new ListeningStatus(url);
availability_listening_status_.emplace_back(listening_status);
}
// Already listening.
if (listening_status->listening_state != ListeningState::kInactive)
return;
listening_status->listening_state = ListeningState::kWaiting;
presentation_service_->ListenForScreenAvailability(url);
}
void PresentationAvailabilityState::MaybeStopListeningToURL(const KURL& url) {
for (const auto& listener : availability_listeners_) {
if (!listener->urls.Contains(url))
continue;
// URL is still observed by some availability object.
if (!listener->availability_callbacks.IsEmpty() ||
!listener->availability_observers.IsEmpty()) {
return;
}
}
auto* listening_status = GetListeningStatus(url);
if (!listening_status) {
LOG(WARNING) << "Stop listening to unknown url: " << url.GetString();
return;
}
if (listening_status->listening_state == ListeningState::kInactive)
return;
listening_status->listening_state = ListeningState::kInactive;
presentation_service_->StopListeningForScreenAvailability(url);
}
mojom::blink::ScreenAvailability
PresentationAvailabilityState::GetScreenAvailability(
const Vector<KURL>& urls) const {
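  // Combines the per-URL results into a single value: AVAILABLE wins
  // immediately; otherwise DISABLED takes precedence over SOURCE_NOT_SUPPORTED,
  // which takes precedence over UNAVAILABLE; UNKNOWN is returned only when no
  // URL reported anything else.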
bool has_disabled = false;
bool has_source_not_supported = false;
bool has_unavailable = false;
for (const auto& url : urls) {
auto* status = GetListeningStatus(url);
auto screen_availability = status
? status->last_known_availability
: mojom::blink::ScreenAvailability::UNKNOWN;
switch (screen_availability) {
case mojom::blink::ScreenAvailability::AVAILABLE:
return mojom::blink::ScreenAvailability::AVAILABLE;
case mojom::blink::ScreenAvailability::DISABLED:
has_disabled = true;
break;
case mojom::blink::ScreenAvailability::SOURCE_NOT_SUPPORTED:
has_source_not_supported = true;
break;
case mojom::blink::ScreenAvailability::UNAVAILABLE:
has_unavailable = true;
break;
case mojom::blink::ScreenAvailability::UNKNOWN:
break;
}
}
if (has_disabled)
return mojom::blink::ScreenAvailability::DISABLED;
if (has_source_not_supported)
return mojom::blink::ScreenAvailability::SOURCE_NOT_SUPPORTED;
if (has_unavailable)
return mojom::blink::ScreenAvailability::UNAVAILABLE;
return mojom::blink::ScreenAvailability::UNKNOWN;
}
PresentationAvailabilityState::AvailabilityListener*
PresentationAvailabilityState::GetAvailabilityListener(
const Vector<KURL>& urls) {
auto* listener_it = std::find_if(
availability_listeners_.begin(), availability_listeners_.end(),
[&urls](const auto& listener) { return listener->urls == urls; });
return listener_it == availability_listeners_.end() ? nullptr : *listener_it;
}
void PresentationAvailabilityState::TryRemoveAvailabilityListener(
AvailabilityListener* listener) {
// URL is still observed by some availability object.
if (!listener->availability_callbacks.IsEmpty() ||
!listener->availability_observers.IsEmpty()) {
return;
}
wtf_size_t slot = availability_listeners_.Find(listener);
if (slot != kNotFound) {
availability_listeners_.EraseAt(slot);
}
}
PresentationAvailabilityState::ListeningStatus*
PresentationAvailabilityState::GetListeningStatus(const KURL& url) const {
auto* status_it =
std::find_if(availability_listening_status_.begin(),
availability_listening_status_.end(),
[&url](const std::unique_ptr<ListeningStatus>& status) {
return status->url == url;
});
return status_it == availability_listening_status_.end() ? nullptr
: status_it->get();
}
PresentationAvailabilityState::AvailabilityListener::AvailabilityListener(
const Vector<KURL>& availability_urls)
: urls(availability_urls) {}
PresentationAvailabilityState::AvailabilityListener::~AvailabilityListener() =
default;
void PresentationAvailabilityState::AvailabilityListener::Trace(
blink::Visitor* visitor) const {
visitor->Trace(availability_callbacks);
visitor->Trace(availability_observers);
}
PresentationAvailabilityState::ListeningStatus::ListeningStatus(
const KURL& availability_url)
: url(availability_url),
last_known_availability(mojom::blink::ScreenAvailability::UNKNOWN),
listening_state(ListeningState::kInactive) {}
PresentationAvailabilityState::ListeningStatus::~ListeningStatus() = default;
} // namespace blink<|fim▁end|> |
if (screen_availability == mojom::blink::ScreenAvailability::DISABLED) {
for (auto& callback_ptr : listener->availability_callbacks) { |
<|file_name|>sessions.py<|end_file_name|><|fim▁begin|># Rekall Memory Forensics
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""
Darwin Session collectors.
"""
__author__ = "Adam Sindelar <[email protected]>"
from rekall.entities import definitions
from rekall.plugins.collectors.darwin import common
from rekall.plugins.collectors.darwin import zones
class DarwinTerminalUserInferor3000(common.DarwinEntityCollector):
"""Infers the relationship between usernames and UIDs using tty sessions."""
outputs = ["User"]
collect_args = dict(
terminals=("Terminal/file matches (has component Permissions) and "
"Terminal/session"))
complete_input = True
def collect(self, hint, terminals):
for terminal in terminals:
owner = terminal["Terminal/file"]["Permissions/owner"]
user = terminal["Terminal/session"]["Session/user"]
# Now tell the manager that these two users are the same user.
if owner and user:
yield user.identity | owner.identity
class DarwinTTYZoneCollector(zones.DarwinZoneElementCollector):
outputs = ["Struct/type=tty"]
zone_name = "ttys"
type_name = "tty"
def validate_element(self, tty):
return tty.t_lock == tty
class DarwinClistParser(common.DarwinEntityCollector):
outputs = ["Buffer/purpose=terminal_input",
"Buffer/purpose=terminal_output"]
collect_args = dict(clists="Struct/type is 'clist'")
def collect(self, hint, clists):
for entity in clists:
clist = entity["Struct/base"]
yield [entity.identity,
definitions.Buffer(kind="ring",
state="freed",
contents=clist.recovered_contents,
start=clist.c_cs,
end=clist.c_ce,
size=clist.c_cn)]
class DarwinTTYParser(common.DarwinEntityCollector):
outputs = ["Terminal", "Struct/type=vnode", "Struct/type=clist",
"Buffer/purpose=terminal_input",
"Buffer/purpose=terminal_output"]
collect_args = dict(ttys="Struct/type is 'tty'")
def collect(self, hint, ttys):
for entity in ttys:
file_identity = None
session_identity = None
tty = entity["Struct/base"]
session = tty.t_session.deref()
vnode = session.s_ttyvp
if session:
session_identity = self.manager.identify({
"Struct/base": session})
if vnode:
# Look, it has a vnode!
yield definitions.Struct(base=vnode,
type="vnode")
file_identity = self.manager.identify({
"Struct/base": vnode})
# Yield just the stubs of the input and output ring buffers.
# DarwinClistParser will grab these if it cares.
yield [definitions.Struct(base=tty.t_rawq,
type="clist"),
definitions.Buffer(purpose="terminal_input",
context=entity.identity)]
yield [definitions.Struct(base=tty.t_outq,
type="clist"),<|fim▁hole|>
# Last, but not least, the Terminal itself.
yield [entity.identity,
definitions.Terminal(
session=session_identity,
file=file_identity)]
class DarwinSessionParser(common.DarwinEntityCollector):
"""Collects session entities from the memory objects."""
_name = "sessions"
outputs = ["Session",
"User",
"Struct/type=tty",
"Struct/type=proc"]
collect_args = dict(sessions="Struct/type is 'session'")
def collect(self, hint, sessions):
for entity in sessions:
session = entity["Struct/base"]
# Have to sanitize the usernames to prevent issues when comparing
# them later.
username = str(session.s_login).replace("\x00", "")
if username:
user_identity = self.manager.identify({
"User/username": username})
yield [user_identity,
definitions.User(
username=username)]
else:
user_identity = None
sid = session.s_sid
# Turns out, SID is not always unique. This is disabled as it is
            # not currently being used, and I need to investigate the causes
# of duplicate sessions occurring on 10.10.
# session_identity = self.manager.identify({
# "Session/sid": sid}) | entity.identity
session_identity = entity.identity
if session.s_ttyp:
yield definitions.Struct(
base=session.s_ttyp,
type="tty")
if session.s_leader and session.s_leader.validate():
yield definitions.Struct(
base=session.s_leader.deref(),
type="proc")
yield [session_identity,
definitions.Session(
user=user_identity,
sid=sid),
definitions.Named(
name="SID %d" % int(sid),
kind="Session")]
class DarwinSessionZoneCollector(zones.DarwinZoneElementCollector):
"""Collects sessions from the sessions allocation zone."""
outputs = ["Struct/type=session"]
zone_name = "session"
type_name = "session"
def validate_element(self, session):
return session.s_count > 0 and session.s_leader.p_argc > 0
class DarwinSessionCollector(common.DarwinEntityCollector):
"""Collects sessions."""
outputs = ["Struct/type=session"]
def collect(self, hint):
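        # Walks the kernel session hash table: _sesshash holds the bucket count,
        # _sesshashtbl points at the array of list heads, and each bucket's
        # linked list is followed via s_hash.le_next.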
session_hash_table_size = self.profile.get_constant_object(
"_sesshash", "unsigned long")
# The hashtable is an array to session list heads.
session_hash_table = self.profile.get_constant_object(
"_sesshashtbl",
target="Pointer",
target_args=dict(
target="Array",
target_args=dict(
target="sesshashhead",
count=session_hash_table_size.v())))
for sesshashhead in session_hash_table:
for session in sesshashhead.lh_first.walk_list("s_hash.le_next"):
yield definitions.Struct(
base=session,
type="session")<|fim▁end|> | definitions.Buffer(purpose="terminal_output",
context=entity.identity)] |
<|file_name|>MacroHelper.java<|end_file_name|><|fim▁begin|>package net.jloop.rejoice;
import net.jloop.rejoice.types.Symbol;<|fim▁hole|>import java.util.Iterator;
import java.util.List;
public class MacroHelper {
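    // Reads values from the input until the terminating symbol is found,
    // recursively expanding any macro functions encountered along the way;
    // the terminator itself is consumed but not included in the result.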
public static List<Value> collect(Env env, Iterator<Value> input, Symbol terminator) {
List<Value> output = new ArrayList<>();
while (true) {
if (!input.hasNext()) {
throw new RuntimeError("MACRO", "Input stream ended before finding the terminating symbol '" + terminator.print() + "'");
}
Value next = input.next();
if (next instanceof Symbol) {
if (next.equals(terminator)) {
return output;
}
Function function = env.lookup((Symbol) next);
if (function instanceof Macro) {
env.trace().push((Symbol) next);
Iterator<Value> values = ((Macro) function).call(env, input);
while (values.hasNext()) {
output.add(values.next());
}
env.trace().pop();
} else {
output.add(next);
}
} else {
output.add(next);
}
}
}
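    // Consumes the next value and returns it when its type matches the expected
    // Type; otherwise a RuntimeError describing the mismatch is thrown.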
@SuppressWarnings("unchecked")
public static <T extends Value> T match(Iterator<Value> input, Type type) {
if (!input.hasNext()) {
throw new RuntimeError("MACRO", "Unexpected EOF when attempting to match " + type.print());
}
Value next = input.next();
if (type == next.type()) {
return (T) next;
} else {
throw new RuntimeError("MACRO", "Expecting to match " + type.print() + ", but found " + next.type().print() + " with value '" + next.print() + "'");
}
}
public static void match(Iterator<Value> input, Symbol symbol) {
if (!input.hasNext()) {
throw new RuntimeError("MACRO", "Unexpected EOF when attempting to match ^symbol '" + symbol.print() + "'");
}
Value next = input.next();
if (!next.equals(symbol)) {
throw new RuntimeError("MACRO", "Expecting to match symbol '" + symbol.print() + "' , but found " + next.type().print() + " with value '" + next.print() + "'");
}
}
}<|fim▁end|> | import net.jloop.rejoice.types.Type;
import java.util.ArrayList; |
<|file_name|>count_custom_vhs.py<|end_file_name|><|fim▁begin|>import os, sys
import json
import copy
import numpy as np
import random
from multiprocessing import Pool
import ipdb
################################################################################################
utils_path = os.path.join(os.path.dirname(os.path.abspath(__file__)),'nlp scripts')
source_vh_dir = '/home/ronotex/Downloads/vector_hash/ingenierias_mineria'
#source_vh_dir = '/home/ronotex/Downloads/vector_hash/mantenimiento_en_minernia'
#treemap_name = 'carreras_rubro_mina'
#adj_name = 'ing_total_adjmatrix'
treemap_name = 'carreras_mantto_mina'
adj_name = 'mantto_mina_adjmatrix'
class LabelDict(dict):
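    # Bidirectional label registry: the dict itself maps name -> integer id,
    # while self.names keeps the reverse id -> name mapping.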
def __init__(self, label_names=[]):
self.names = []
for name in label_names:
self.add(name)
def add(self, name):
label_id = len(self.names)
if name in self:
#warnings.warn('Ignoring duplicated label ' + name)
return self[name]
self[name] = label_id
self.names.append(name)
return label_id
def get_label_name(self, label_id):
return self.names[label_id]
def get_label_id(self, name):
if name not in self:
return -1
return self[name]
def size(self):
return len(self.names)
################################################################################################
hierarchy = json.loads(open('carreras_ing2.json').read())
# docname : {docname : true name}
nameByFile = json.loads(open('ident_names2.json').read())
fileByName = {}
temp={}
for (file,name) in nameByFile.items():
temp[file.strip(' ')] = name.strip(' ')
fileByName[name.strip(' ')] = file.strip(' ')
nameByFile = dict(temp)
################################################################################################
def sorter(T,sizeById, file_dict):
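    # Recursively fills in "size" for every leaf of the treemap hierarchy from
    # sizeById, sorts each node's children by subtree total in descending order,
    # and returns the subtree total.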
if "children" not in T:
_id = file_dict.get_label_id(fileByName[T["name"]])
try:
T["size"] = int(sizeById[_id])
except:
T["size"] = sizeById[_id]
return float(T["size"])
children = T["children"]
temp = []
_total = 0
for child in children:
subt_sum = sorter(child,sizeById, file_dict)
_total += subt_sum
temp.append(subt_sum)
temp = list(zip(temp,range(len(children))))
temp.sort(reverse=True)
T["children"] = [children[k[1]] for k in temp]
return _total
def getSortedLeaves(T, V,file_dict):
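    # Appends the file ids of the hierarchy's leaves to V, in the order produced
    # by sorter().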
if "children" not in T:
fn = fileByName[ T["name"] ]
V.append(file_dict.get_label_id(fn))
return
for child in T["children"]:
getSortedLeaves(child,V,file_dict)
################################################################################################
################################################################################################
if __name__=='__main__':
vh_dict = LabelDict()
file_dict = LabelDict()
graph = np.zeros([30,30])
vhsByFile = [set() for i in range(30)]
freq_major = np.zeros([30])
for root,dirs,filenames in os.walk(source_vh_dir):
for f in filenames:
if f[-1]!='~':
#file_name = f[3:] # vh_name
#if file_name=='all' or file_name=='ing':
# continue
p = f.find('_mineria')
#p = f.find('_mantto_mineria')
file_name = f[3:p] # vh_name_mineria
#file_name = f[14:] # mantto_min_vh_name
id_file = file_dict.add(file_name)
for line in open(os.path.join(source_vh_dir,f)):
line = line.strip('\n')
if line!='':
id_vh = vh_dict.add(line)
freq_major[id_file]+=1
vhsByFile[id_file].add(id_vh)
count_id_vh = vh_dict.size()
count_id_file = file_dict.size()
print(count_id_vh)
print(count_id_file)
ipdb.set_trace()
# node
for k in range(count_id_file):
# posible edges
outgoing = set()
for i in range(count_id_file):
if k!=i:
temp = vhsByFile[k] & vhsByFile[i]
graph[k,i] = len(temp)
outgoing |= temp
graph[k,k] = freq_major[k] - len(outgoing)
# GENERATE CARRERAS.JSON
tot = sorter(hierarchy,freq_major,file_dict)
open(treemap_name+'.json','w').write(json.dumps(hierarchy,ensure_ascii=False, indent = 2))
per_hierarchy = dict(hierarchy)
temp = [format(x,'.2f') for x in 100.0*freq_major/count_id_vh]
tot = sorter(per_hierarchy,temp,file_dict)
open(treemap_name+'_perc.json','w').write(json.dumps(per_hierarchy,ensure_ascii=False, indent = 2))
# GENERATE ADJMATRIX.JSON
sorted_ids = []
getSortedLeaves(hierarchy,sorted_ids,file_dict)
adjmatrix = []
for k in sorted_ids:
if freq_major[k]==0:
continue
u = file_dict.get_label_name(k)
item = dict()
item["name"] = nameByFile[u]
item["size"] = int(freq_major[k])
item["imports"] = []
for i in sorted_ids:
if graph[k,i]>0:
v = file_dict.get_label_name(i)
imp = dict({'name':nameByFile[v],'weight':int(graph[k,i])})<|fim▁hole|><|fim▁end|> | item["imports"].append(imp)
adjmatrix.append(item)
open(adj_name + '.json','w').write(json.dumps(adjmatrix,ensure_ascii=False, indent = 2)) |
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017 Nick Gashkov
#
# Distributed under MIT License. See LICENSE file for details.
<|fim▁hole|>
class ValidationError(Exception):
def __init__(self, *args, **kwargs):
self.error_dict = kwargs.pop('error_dict')
super(ValidationError, self).__init__(*args, **kwargs)<|fim▁end|> | |
<|file_name|>tica.py<|end_file_name|><|fim▁begin|># This file is part of PyEMMA.
#
# Copyright (c) 2015, 2014 Computational Molecular Biology Group, Freie Universitaet Berlin (GER)
#
# PyEMMA is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
Created on 19.01.2015
@author: marscher
'''
import numpy as np
from pyemma._base.serialization.serialization import SerializableMixIn
from pyemma._ext.variational.solvers.direct import eig_corr
from pyemma._ext.variational.util import ZeroRankError
from pyemma.coordinates.estimation.covariance import LaggedCovariance
from pyemma.coordinates.transform._tica_base import TICABase, TICAModelBase
from pyemma.util.annotators import fix_docs
import warnings
__all__ = ['TICA']
@fix_docs
class TICA(TICABase, SerializableMixIn):
r""" Time-lagged independent component analysis (TICA)"""
__serialize_version = 0
def __init__(self, lag, dim=-1, var_cutoff=0.95, kinetic_map=True, commute_map=False, epsilon=1e-6,
stride=1, skip=0, reversible=True, weights=None, ncov_max=float('inf')):
r""" Time-lagged independent component analysis (TICA) [1]_, [2]_, [3]_.
Parameters
----------
lag : int
lag time
dim : int, optional, default -1
Maximum number of significant independent components to use to reduce dimension of input data. -1 means
all numerically available dimensions (see epsilon) will be used unless reduced by var_cutoff.
Setting dim to a positive value is exclusive with var_cutoff.
var_cutoff : float in the range [0,1], optional, default 0.95
Determines the number of output dimensions by including dimensions until their cumulative kinetic variance
exceeds the fraction subspace_variance. var_cutoff=1.0 means all numerically available dimensions
(see epsilon) will be used, unless set by dim. Setting var_cutoff smaller than 1.0 is exclusive with dim
kinetic_map : bool, optional, default True
Eigenvectors will be scaled by eigenvalues. As a result, Euclidean distances in the transformed data
approximate kinetic distances [4]_. This is a good choice when the data is further processed by clustering.
commute_map : bool, optional, default False
Eigenvector_i will be scaled by sqrt(timescale_i / 2). As a result, Euclidean distances in the transformed
data will approximate commute distances [5]_.
epsilon : float
eigenvalue norm cutoff. Eigenvalues of C0 with norms <= epsilon will be
cut off. The remaining number of eigenvalues define the size
of the output.
stride: int, optional, default = 1
Use only every stride-th time step. By default, every time step is used.
skip : int, default=0
skip the first initial n frames per trajectory.
reversible: bool, default=True
symmetrize correlation matrices C_0, C_{\tau}.
weights: object or list of ndarrays, optional, default = None
* An object that allows to compute re-weighting factors to estimate equilibrium means and correlations from
off-equilibrium data. The only requirement is that weights possesses a method weights(X), that accepts a
trajectory X (np.ndarray(T, n)) and returns a vector of re-weighting factors (np.ndarray(T,)).
* A list of ndarrays (ndim=1) specifies the weights for each frame of each trajectory.
Notes
-----
Given a sequence of multivariate data :math:`X_t`, computes the mean-free
covariance and time-lagged covariance matrix:
.. math::
C_0 &= (X_t - \mu)^T (X_t - \mu) \\
C_{\tau} &= (X_t - \mu)^T (X_{t + \tau} - \mu)
and solves the eigenvalue problem
.. math:: C_{\tau} r_i = C_0 \lambda_i(tau) r_i,
where :math:`r_i` are the independent components and :math:`\lambda_i(tau)` are
their respective normalized time-autocorrelations. The eigenvalues are
related to the relaxation timescale by
.. math:: t_i(tau) = -\tau / \ln |\lambda_i|.
When used as a dimension reduction method, the input data is projected
onto the dominant independent components.
References
----------
.. [1] Perez-Hernandez G, F Paul, T Giorgino, G De Fabritiis and F Noe. 2013.
Identification of slow molecular order parameters for Markov model construction
J. Chem. Phys. 139, 015102. doi:10.1063/1.4811489
.. [2] Schwantes C, V S Pande. 2013.
Improvements in Markov State Model Construction Reveal Many Non-Native Interactions in the Folding of NTL9
J. Chem. Theory. Comput. 9, 2000-2009. doi:10.1021/ct300878a
.. [3] L. Molgedey and H. G. Schuster. 1994.
Separation of a mixture of independent signals using time delayed correlations
Phys. Rev. Lett. 72, 3634.
.. [4] Noe, F. and Clementi, C. 2015. Kinetic distance and kinetic maps from molecular dynamics simulation.
J. Chem. Theory. Comput. doi:10.1021/acs.jctc.5b00553
.. [5] Noe, F., Banisch, R., Clementi, C. 2016. Commute maps: separating slowly-mixing molecular configurations
for kinetic modeling. J. Chem. Theory. Comput. doi:10.1021/acs.jctc.6b00762
"""
super(TICA, self).__init__()
if kinetic_map and commute_map:
raise ValueError('Trying to use both kinetic_map and commute_map. Use either or.')
if (kinetic_map or commute_map) and not reversible:
kinetic_map = False
commute_map = False
warnings.warn("Cannot use kinetic_map or commute_map for non-reversible processes, both will be set to"
" False.")
# this instance will be set by partial fit.
self._covar = None
self.dim = dim
self.var_cutoff = var_cutoff
self.set_params(lag=lag, dim=dim, var_cutoff=var_cutoff, kinetic_map=kinetic_map, commute_map=commute_map,
epsilon=epsilon, reversible=reversible, stride=stride, skip=skip, weights=weights, ncov_max=ncov_max)
@property
def model(self):
if not hasattr(self, '_model') or self._model is None:
self._model = TICAModelBase()
return self._model
def describe(self):
try:
dim = self.dimension()
except RuntimeError:
dim = self.dim
return "[TICA, lag = %i; max. output dim. = %i]" % (self._lag, dim)
def estimate(self, X, **kwargs):
r"""
Chunk-based parameterization of TICA. Iterates over all data and estimates
the mean, covariance and time lagged covariance. Finally, the
generalized eigenvalue problem is solved to determine
the independent components.
"""
return super(TICA, self).estimate(X, **kwargs)
def partial_fit(self, X):
""" incrementally update the covariances and mean.
Parameters
----------
X: array, list of arrays, PyEMMA reader
input data.
Notes
-----
The projection matrix is first being calculated upon its first access.
"""
from pyemma.coordinates import source
iterable = source(X, chunksize=self.chunksize)
<|fim▁hole|> raise RuntimeError("requested more output dimensions (%i) than dimension"
" of input data (%i)" % (self.dim, indim))
if self._covar is None:
self._covar = LaggedCovariance(c00=True, c0t=True, ctt=False, remove_data_mean=True, reversible=self.reversible,
lag=self.lag, bessel=False, stride=self.stride, skip=self.skip,
weights=self.weights, ncov_max=self.ncov_max)
self._covar.partial_fit(iterable)
self.model.update_model_params(mean=self._covar.mean, # TODO: inefficient, fixme
cov=self._covar.C00_,
cov_tau=self._covar.C0t_)
self._estimated = False
return self
def _estimate(self, iterable, **kw):
covar = LaggedCovariance(c00=True, c0t=True, ctt=False, remove_data_mean=True, reversible=self.reversible,
lag=self.lag, bessel=False, stride=self.stride, skip=self.skip,
weights=self.weights, ncov_max=self.ncov_max)
indim = iterable.dimension()
if not self.dim <= indim:
raise RuntimeError("requested more output dimensions (%i) than dimension"
" of input data (%i)" % (self.dim, indim))
if self._logger_is_active(self._loglevel_DEBUG):
self.logger.debug("Running TICA with tau=%i; Estimating two covariance matrices"
" with dimension (%i, %i)", self._lag, indim, indim)
covar.estimate(iterable, chunksize=self.chunksize, **kw)
self.model.update_model_params(mean=covar.mean,
cov=covar.C00_,
cov_tau=covar.C0t_)
self._diagonalize()
return self.model
def _diagonalize(self):
# diagonalize with low rank approximation
self.logger.debug("diagonalize Cov and Cov_tau.")
try:
eigenvalues, eigenvectors = eig_corr(self.cov, self.cov_tau, self.epsilon, sign_maxelement=True)
except ZeroRankError:
raise ZeroRankError('All input features are constant in all time steps. No dimension would be left after dimension reduction.')
if self.kinetic_map and self.commute_map:
raise ValueError('Trying to use both kinetic_map and commute_map. Use either or.')
if self.kinetic_map: # scale by eigenvalues
eigenvectors *= eigenvalues[None, :]
if self.commute_map: # scale by (regularized) timescales
timescales = 1-self.lag / np.log(np.abs(eigenvalues))
# dampen timescales smaller than the lag time, as in section 2.5 of ref. [5]
regularized_timescales = 0.5 * timescales * np.maximum(np.tanh(np.pi * ((timescales - self.lag) / self.lag) + 1), 0)
eigenvectors *= np.sqrt(regularized_timescales / 2)
self.logger.debug("finished diagonalisation.")
# compute cumulative variance
cumvar = np.cumsum(np.abs(eigenvalues) ** 2)
cumvar /= cumvar[-1]
self.model.update_model_params(cumvar=cumvar,
eigenvalues=eigenvalues,
eigenvectors=eigenvectors)
self._estimated = True<|fim▁end|> | indim = iterable.dimension()
if not self.dim <= indim: |
<|file_name|>Solution.py<|end_file_name|><|fim▁begin|>"""
Given an integer, write an algorithm to convert it to hexadecimal. For negative integer, two’s complement method is used.
Note:
All letters in hexadecimal (a-f) must be in lowercase.
The hexadecimal string must not contain extra leading 0s. If the number is zero, it is represented by a single zero character '0'; otherwise, the first character in the hexadecimal string will not be the zero character.
The given number is guaranteed to fit within the range of a 32-bit signed integer.
You must not use any method provided by the library which converts/formats the number to hex directly.
Example 1:
Input:
26
Output:
"1a"
Example 2:
Input:
-1
Output:
"ffffffff"
"""
class Solution(object):<|fim▁hole|> """
curr = []
ret = []
for i in xrange(32):
curr.append(str(num & 0x01))
num = num >> 1
if len(curr) == 4:
n = int(''.join(reversed(curr)), 2)
if n < 10:
ret.append(str(n))
else:
ret.append(chr(ord('a') + n - 10))
curr = []
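# Drop leading zero nibbles so the hex string has no extra leading zeros.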
cleaned = []
is_ok = False
for i, n in enumerate(reversed(ret)):
if n != '0':
is_ok = True
if is_ok:
cleaned.append(n)
if not cleaned:
return '0'
return ''.join(cleaned)<|fim▁end|> | def toHex(self, num):
"""
:type num: int
:rtype: str |
<|file_name|>upper_half.rs<|end_file_name|><|fim▁begin|>use malachite_base::num::basic::unsigneds::PrimitiveUnsigned;
use malachite_base::num::conversion::traits::{JoinHalves, SplitInHalf};
use malachite_base_test_util::generators::unsigned_gen;
fn upper_half_test_helper<T: PrimitiveUnsigned + SplitInHalf>(n: T, out: T::Half)
where
T::Half: PrimitiveUnsigned,
{
assert_eq!(n.upper_half(), out);
}
#[test]<|fim▁hole|> upper_half_test_helper(0u64, 0u32);
upper_half_test_helper(1u64, 0u32);
upper_half_test_helper(u16::from(u8::MAX), 0);
upper_half_test_helper(u16::from(u8::MAX) + 1, 1);
upper_half_test_helper(u16::MAX, u8::MAX);
upper_half_test_helper(258u16, 1u8);
upper_half_test_helper(0xabcd1234u32, 0xabcd);
}
fn upper_half_properties_helper<T: JoinHalves + PrimitiveUnsigned + SplitInHalf>() {
unsigned_gen::<T>().test_properties(|n| {
let upper = n.upper_half();
assert_eq!(T::join_halves(upper, n.lower_half()), n);
});
}
#[test]
fn upper_half_properties() {
upper_half_properties_helper::<u16>();
upper_half_properties_helper::<u32>();
upper_half_properties_helper::<u64>();
upper_half_properties_helper::<u128>();
}<|fim▁end|> | pub fn test_upper_half() { |
<|file_name|>test_arcgis_swm.py<|end_file_name|><|fim▁begin|>import unittest
import pysal
from pysal.core.IOHandlers.arcgis_swm import ArcGISSwmIO
import tempfile
import os
class test_ArcGISSwmIO(unittest.TestCase):
def setUp(self):
self.test_file = test_file = pysal.examples.get_path('ohio.swm')
self.obj = ArcGISSwmIO(test_file, 'r')
def test_close(self):
f = self.obj
f.close()
self.failUnlessRaises(ValueError, f.read)
def test_read(self):
w = self.obj.read()
self.assertEqual(88, w.n)
self.assertEqual(5.25, w.mean_neighbors)
self.assertEqual([1.0, 1.0, 1.0, 1.0], w[1].values())
def test_seek(self):
self.test_read()
self.failUnlessRaises(StopIteration, self.obj.read)
self.obj.seek(0)
self.test_read()
def test_write(self):
w = self.obj.read()
f = tempfile.NamedTemporaryFile(
suffix='.swm', dir=pysal.examples.get_path(''))
fname = f.name
f.close()
o = pysal.open(fname, 'w')
o.write(w)
o.close()<|fim▁hole|> wnew = pysal.open(fname, 'r').read()
self.assertEqual(wnew.pct_nonzero, w.pct_nonzero)
os.remove(fname)
if __name__ == '__main__':
unittest.main()<|fim▁end|> | |
<|file_name|>post_limit_checker.js<|end_file_name|><|fim▁begin|>//* TITLE Post Limit Checker **//
//* VERSION 0.2.1 **//
//* DESCRIPTION Are you close to the limit? **//
//* DETAILS Shows you how many posts you can reblog today. **//
//* DEVELOPER STUDIOXENIX **//
//* FRAME false **//
//* BETA false **//
XKit.extensions.post_limit_checker = new Object({
running: false,
apiKey: "fuiKNFp9vQFvjLNvx4sUwti4Yb5yGutBN4Xh10LXZhhRKjWlV4",
run: function() {
this.running = true;
XKit.tools.init_css("post_limit_checker");
if (XKit.interface.where().dashboard !== true && XKit.interface.where().channel !== true) { return; }
var xf_html = '<ul class="controls_section" id="post_limit_checker_ul">' +
'<li class="section_header selected">Post Limit</li>' +
'<li class="no_push" style="height: 36px;"><a href="#" onclick="return false;" id="post_limit_checker_view">' +
'<div class="hide_overflow" style="color: rgba(255, 255, 255, 0.5) !important; font-weight: bold; padding-left: 10px; padding-top: 8px;">Check Post Limit</div>' +
'</a></li>' +
'</ul>';
$("ul.controls_section:first").before(xf_html);
$("#post_limit_checker_view").click(function() { XKit.extensions.post_limit_checker.start(); });
},
window_id: 0,
start: function() {
var shown_message = XKit.storage.get("post_limit_checker","shown_warning","");
var m_html = "<div id=\"xkit-plc-list\" class=\"nano\"><div id=\"xkit-plc-list-content\" class=\"content\">" +
"<div class=\"xkit-warning-plc-text\"><b>Deleted posts</b><br/>Deleted posts are counted by Tumblr, but this tool can't count them. For example, if you've made 250 posts since the last reset but then deleted 50 of them, this tool will tell you that you have 50 more posts to go, but in reality you've already hit your post limit.</div>" +
"<div class=\"xkit-warning-plc-text\"><b>Original photo posts</b><br/>There is a separate, 75 uploads per day limit for photo posts. This extension does not check for that.</div>" +
"<div class=\"xkit-warning-plc-text\"><b>No Guarantee</b><br/>The XKit Guy is not making any guarantees about the reliability of this tool.</div>" +
"</div></div>";
XKit.window.show("Important!","Before beginning, please read the following carefully." + m_html,"warning","<div class=\"xkit-button default\" id=\"xkit-plc-continue\">Continue</div><div class=\"xkit-button default\" id=\"xkit-close-message\">Cancel</div>");
$("#xkit-plc-list").nanoScroller();
$("#xkit-plc-list").nanoScroller({ scroll: 'top' });
$("#xkit-plc-continue").click(function() {
XKit.extensions.post_limit_checker.window_id = XKit.tools.random_string();
XKit.window.show("Please wait","Gathering the information I need..." + XKit.progress.add("post-limit-checker-progress"),"info");
var posts = [];
for (i=0;i<XKit.tools.get_blogs().length;i++) {posts.push([]);}
XKit.extensions.post_limit_checker.next(0, posts, XKit.extensions.post_limit_checker.window_id, XKit.tools.get_blogs(), 0);
});
},
get_time: function(m_window_id, posts) {
// Calculate the date according to NY time.
// To-do: DST calculations?
var date = XKit.extensions.post_limit_checker.convert_timezone(Math.round(+new Date()/1000) * 1000, - 4);
// Now we need to figure out when the next reset is.
var next_reset = new Date(date.getFullYear(), date.getMonth(), date.getDate() + 1, 0, 0, 0);
var difference = (next_reset - date);
var hours = Math.floor((difference % 86400000) / 3600000);
var minutes = Math.floor(((difference % 86400000) % 3600000) / 60000);
// Now get when the last reset was. Lazy coding.
var last_reset = new Date(date.getFullYear(), date.getMonth(), date.getDate(), 0, 0, 0);
var posts_since_reset = 0;
for (var i=0;i<posts.length;i++) {
var m_timestamp = XKit.extensions.post_limit_checker.convert_timezone(posts[i].timestamp * 1000, - 4);
if ((m_timestamp.getTime() <= next_reset.getTime() && m_timestamp.getTime() >= last_reset.getTime())) {
posts_since_reset++;
}
}
var remaining = 250 - posts_since_reset;
var remaining_color = "#298a51";
if (remaining <= 60) { remaining_color = "#de8c00"; }
if (remaining <= 30) { remaining_color = "#ec0000"; }
if (remaining === 0) { remaining = "None"; }
var reset_str = hours + " hours and " + minutes + " minutes";
if (hours === 0) {
reset_str = minutes + " minutes";
}
if (minutes <= 1) {
reset_str = "a few seconds";<|fim▁hole|> }
if (hours >= 1 && minutes === 0) {
reset_str = hours + " hours";
}
if (hours == 1) {
reset_str = reset_str.replace("hours", "hour");
}
if (minutes == 1) {
reset_str = reset_str.replace("minutes", "minute");
}
var m_html = "<div class=\"xkit-plc-division\">" +
"<div class=\"xkit-plc-title\">Posts Made</div>" +
"<div class=\"xkit-plc-value\">" + posts_since_reset + "</div>" +
"</div>" +
"<div class=\"xkit-plc-division\">" +
"<div class=\"xkit-plc-title\">Posts Remaining</div>" +
"<div class=\"xkit-plc-value\" style=\"font-weight: bold; color: " + remaining_color + "\">" + remaining + "</div>" +
"</div>" +
"<div class=\"xkit-plc-division\">" +
"<div class=\"xkit-plc-title\">Next reset in</div>" +
"<div class=\"xkit-plc-value\">" + reset_str + "</div>" +
"</div>";
XKit.window.show("Results", "Here is what I could gather:" + m_html, "info", "<div class=\"xkit-button default\" id=\"xkit-close-message\">OK</div>");
},
convert_timezone: function(time, offset) {
// From:
// http://www.techrepublic.com/article/convert-the-local-time-to-another-time-zone-with-this-javascript/
// create Date object for current location
d = new Date(time);
// convert to msec
// add local time zone offset
// get UTC time in msec
utc = d.getTime() + (d.getTimezoneOffset() * 60000);
// create new Date object for different city
// using supplied offset
nd = new Date(utc + (3600000*offset));
// return time as a string
return nd;
},
next: function(page, posts, m_window_id, blogs, index) {
if (m_window_id !== XKit.extensions.post_limit_checker.window_id) { console.log("wrong window id. 01"); return; }
var offset = page * 20;
var api_url = "https://api.tumblr.com/v2/blog/" + blogs[index] + ".tumblr.com/posts/?api_key=" + XKit.extensions.post_limit_checker.apiKey + "&offset=" + offset;
GM_xmlhttpRequest({
method: "GET",
url: api_url,
json: true,
onerror: function(response) {
console.log("Error getting page.");
XKit.extensions.post_limit_checker.display_error(m_window_id, "501");
return;
},
onload: function(response) {
if (XKit.extensions.post_limit_checker.window_id !== m_window_id) { console.log("wrong window id. 02"); return; }
try {
data = JSON.parse(response.responseText);
for (var i=0;i<data.response.posts.length;i++) {
// I would check the date here but I'm a lazy man.
posts[index].push(data.response.posts[i]);
}
XKit.progress.value("post-limit-checker-progress", (posts[index].length / 2.5) - 10);
if (posts[index].length >= 250 || data.response.posts.length === 0) {
if (index < blogs.length - 1) {
index++;
setTimeout(function() { XKit.extensions.post_limit_checker.next(0, posts, m_window_id, blogs, index);}, 400);
} else {
var all_posts = [];
all_posts = all_posts.concat.apply(all_posts, posts);
XKit.extensions.post_limit_checker.get_time(m_window_id, all_posts);
}
} else {
setTimeout(function() { XKit.extensions.post_limit_checker.next((page + 1), posts, m_window_id, blogs, index); }, 400);
}
} catch(e) {
console.log("Error parsing page, " + e.message);
XKit.extensions.post_limit_checker.display_error(m_window_id, "551");
return;
}
}
});
},
display_error: function(m_window_id, err_code) {
if (XKit.extensions.post_limit_checker.window_id !== m_window_id) { return; }
XKit.window.show("Oops.","An error prevented me from gathering the information needed.<br/>Please try again later.<br/>Code: \"XPLC" + err_code + "\"","error","<div id=\"xkit-close-message\" class=\"xkit-button default\">OK</div>");
},
destroy: function() {
$("#post_limit_checker_ul").remove();
$("#post_limit_checker_view").remove();
this.running = false;
}
});<|fim▁end|> | |
<|file_name|>regularizers.py<|end_file_name|><|fim▁begin|># Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Built-in regularizers.
"""
# pylint: disable=invalid-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import six
from tensorflow.python.keras import backend
from tensorflow.python.keras.utils.generic_utils import deserialize_keras_object
from tensorflow.python.keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import keras_export
def _check_penalty_number(x):
"""check penalty number availability, raise ValueError if failed."""
if not isinstance(x, (float, int)):
raise ValueError(('Value: {} is not a valid regularization penalty number, '
'expected an int or float value').format(x))
if math.isinf(x) or math.isnan(x):
raise ValueError(
('Value: {} is not a valid regularization penalty number, '
'a positive/negative infinity or NaN is not a property value'
).format(x))
def _none_to_default(inputs, default):
return default if inputs is None else inputs
@keras_export('keras.regularizers.Regularizer')
class Regularizer(object):
"""Regularizer base class.
Regularizers allow you to apply penalties on layer parameters or layer
activity during optimization. These penalties are summed into the loss
function that the network optimizes.
Regularization penalties are applied on a per-layer basis. The exact API will
depend on the layer, but many layers (e.g. `Dense`, `Conv1D`, `Conv2D` and
`Conv3D`) have a unified API.
These layers expose 3 keyword arguments:
- `kernel_regularizer`: Regularizer to apply a penalty on the layer's kernel
- `bias_regularizer`: Regularizer to apply a penalty on the layer's bias
- `activity_regularizer`: Regularizer to apply a penalty on the layer's output
All layers (including custom layers) expose `activity_regularizer` as a
settable property, whether or not it is in the constructor arguments.
The value returned by the `activity_regularizer` is divided by the input
batch size so that the relative weighting between the weight regularizers and
the activity regularizers does not change with the batch size.
You can access a layer's regularization penalties by calling `layer.losses`
after calling the layer on inputs.
## Example
>>> layer = tf.keras.layers.Dense(
... 5, input_dim=5,
... kernel_initializer='ones',
... kernel_regularizer=tf.keras.regularizers.L1(0.01),
... activity_regularizer=tf.keras.regularizers.L2(0.01))
>>> tensor = tf.ones(shape=(5, 5)) * 2.0
>>> out = layer(tensor)
>>> # The kernel regularization term is 0.25
>>> # The activity regularization term (after dividing by the batch size) is 5
>>> tf.math.reduce_sum(layer.losses)
<tf.Tensor: shape=(), dtype=float32, numpy=5.25>
## Available penalties
```python
tf.keras.regularizers.L1(0.3) # L1 Regularization Penalty
tf.keras.regularizers.L2(0.1) # L2 Regularization Penalty
tf.keras.regularizers.L1L2(l1=0.01, l2=0.01) # L1 + L2 penalties
```
## Directly calling a regularizer
Compute a regularization loss on a tensor by directly calling a regularizer
as if it is a one-argument function.
E.g.
>>> regularizer = tf.keras.regularizers.L2(2.)
>>> tensor = tf.ones(shape=(5, 5))
>>> regularizer(tensor)
<tf.Tensor: shape=(), dtype=float32, numpy=50.0>
## Developing new regularizers
Any function that takes in a weight matrix and returns a scalar
tensor can be used as a regularizer, e.g.:
>>> @tf.keras.utils.register_keras_serializable(package='Custom', name='l1')
... def l1_reg(weight_matrix):
... return 0.01 * tf.math.reduce_sum(tf.math.abs(weight_matrix))
...
>>> layer = tf.keras.layers.Dense(5, input_dim=5,
... kernel_initializer='ones', kernel_regularizer=l1_reg)
>>> tensor = tf.ones(shape=(5, 5))
>>> out = layer(tensor)
>>> layer.losses
[<tf.Tensor: shape=(), dtype=float32, numpy=0.25>]
Alternatively, you can write your custom regularizers in an
object-oriented way by extending this regularizer base class, e.g.:
>>> @tf.keras.utils.register_keras_serializable(package='Custom', name='l2')
... class L2Regularizer(tf.keras.regularizers.Regularizer):
... def __init__(self, l2=0.): # pylint: disable=redefined-outer-name
... self.l2 = l2
...
... def __call__(self, x):
... return self.l2 * tf.math.reduce_sum(tf.math.square(x))
...
... def get_config(self):
... return {'l2': float(self.l2)}
...
>>> layer = tf.keras.layers.Dense(
... 5, input_dim=5, kernel_initializer='ones',
... kernel_regularizer=L2Regularizer(l2=0.5))
>>> tensor = tf.ones(shape=(5, 5))
>>> out = layer(tensor)
>>> layer.losses
[<tf.Tensor: shape=(), dtype=float32, numpy=12.5>]
### A note on serialization and deserialization:
Registering the regularizers as serializable is optional if you are just
training and executing models, exporting to and from SavedModels, or saving
and loading weight checkpoints.
Registration is required for Keras `model_to_estimator`, saving and
loading models to HDF5 formats, Keras model cloning, some visualization
utilities, and exporting models to and from JSON. If using this functionality,
you must make sure any python process running your model has also defined
and registered your custom regularizer.
`tf.keras.utils.register_keras_serializable` is only available in TF 2.1 and
beyond. In earlier versions of TensorFlow you must pass your custom
regularizer to the `custom_objects` argument of methods that expect custom
regularizers to be registered as serializable.
"""
def __call__(self, x):
"""Compute a regularization penalty from an input tensor."""
return 0.
@classmethod
def from_config(cls, config):
"""Creates a regularizer from its config.
This method is the reverse of `get_config`,
capable of instantiating the same regularizer from the config
dictionary.
This method is used by Keras `model_to_estimator`, saving and
loading models to HDF5 formats, Keras model cloning, some visualization
utilities, and exporting models to and from JSON.
Args:
config: A Python dictionary, typically the output of get_config.
Returns:
A regularizer instance.
"""
return cls(**config)
def get_config(self):
"""Returns the config of the regularizer.
An regularizer config is a Python dictionary (serializable)
containing all configuration parameters of the regularizer.
The same regularizer can be reinstantiated later
(without any saved state) from this configuration.
This method is optional if you are just training and executing models,
exporting to and from SavedModels, or using weight checkpoints.
This method is required for Keras `model_to_estimator`, saving and
loading models to HDF5 formats, Keras model cloning, some visualization
utilities, and exporting models to and from JSON.
Returns:
Python dictionary.
"""
raise NotImplementedError(str(self) + ' does not implement get_config()')
@keras_export('keras.regularizers.L1L2')
class L1L2(Regularizer):
"""A regularizer that applies both L1 and L2 regularization penalties.
The L1 regularization penalty is computed as:
`loss = l1 * reduce_sum(abs(x))`
<|fim▁hole|> L1L2 may be passed to a layer as a string identifier:
>>> dense = tf.keras.layers.Dense(3, kernel_regularizer='l1_l2')
In this case, the default values used are `l1=0.01` and `l2=0.01`.
Attributes:
l1: Float; L1 regularization factor.
l2: Float; L2 regularization factor.
"""
def __init__(self, l1=0., l2=0.): # pylint: disable=redefined-outer-name
# The default value for l1 and l2 are different from the value in l1_l2
# for backward compatibility reason. Eg, L1L2(l2=0.1) will only have l2
# and no l1 penalty.
l1 = 0. if l1 is None else l1
l2 = 0. if l2 is None else l2
_check_penalty_number(l1)
_check_penalty_number(l2)
self.l1 = backend.cast_to_floatx(l1)
self.l2 = backend.cast_to_floatx(l2)
def __call__(self, x):
regularization = backend.constant(0., dtype=x.dtype)
if self.l1:
regularization += self.l1 * math_ops.reduce_sum(math_ops.abs(x))
if self.l2:
regularization += self.l2 * math_ops.reduce_sum(math_ops.square(x))
return regularization
def get_config(self):
return {'l1': float(self.l1), 'l2': float(self.l2)}
@keras_export('keras.regularizers.L1', 'keras.regularizers.l1')
class L1(Regularizer):
"""A regularizer that applies a L1 regularization penalty.
The L1 regularization penalty is computed as:
`loss = l1 * reduce_sum(abs(x))`
L1 may be passed to a layer as a string identifier:
>>> dense = tf.keras.layers.Dense(3, kernel_regularizer='l1')
In this case, the default value used is `l1=0.01`.
Attributes:
l1: Float; L1 regularization factor.
"""
def __init__(self, l1=0.01, **kwargs): # pylint: disable=redefined-outer-name
l1 = kwargs.pop('l', l1) # Backwards compatibility
if kwargs:
raise TypeError('Argument(s) not recognized: %s' % (kwargs,))
l1 = 0.01 if l1 is None else l1
_check_penalty_number(l1)
self.l1 = backend.cast_to_floatx(l1)
def __call__(self, x):
return self.l1 * math_ops.reduce_sum(math_ops.abs(x))
def get_config(self):
return {'l1': float(self.l1)}
@keras_export('keras.regularizers.L2', 'keras.regularizers.l2')
class L2(Regularizer):
"""A regularizer that applies a L2 regularization penalty.
The L2 regularization penalty is computed as:
`loss = l2 * reduce_sum(square(x))`
L2 may be passed to a layer as a string identifier:
>>> dense = tf.keras.layers.Dense(3, kernel_regularizer='l2')
In this case, the default value used is `l2=0.01`.
Attributes:
l2: Float; L2 regularization factor.
"""
def __init__(self, l2=0.01, **kwargs): # pylint: disable=redefined-outer-name
l2 = kwargs.pop('l', l2) # Backwards compatibility
if kwargs:
raise TypeError('Argument(s) not recognized: %s' % (kwargs,))
l2 = 0.01 if l2 is None else l2
_check_penalty_number(l2)
self.l2 = backend.cast_to_floatx(l2)
def __call__(self, x):
return self.l2 * math_ops.reduce_sum(math_ops.square(x))
def get_config(self):
return {'l2': float(self.l2)}
@keras_export('keras.regularizers.l1_l2')
def l1_l2(l1=0.01, l2=0.01): # pylint: disable=redefined-outer-name
r"""Create a regularizer that applies both L1 and L2 penalties.
The L1 regularization penalty is computed as:
`loss = l1 * reduce_sum(abs(x))`
The L2 regularization penalty is computed as:
`loss = l2 * reduce_sum(square(x))`
Args:
l1: Float; L1 regularization factor.
l2: Float; L2 regularization factor.
Returns:
An L1L2 Regularizer with the given regularization factors.
"""
return L1L2(l1=l1, l2=l2)
# Deserialization aliases.
l1 = L1
l2 = L2
@keras_export('keras.regularizers.serialize')
def serialize(regularizer):
return serialize_keras_object(regularizer)
@keras_export('keras.regularizers.deserialize')
def deserialize(config, custom_objects=None):
if config == 'l1_l2':
# Special case necessary since the defaults used for "l1_l2" (string)
# differ from those of the L1L2 class.
return L1L2(l1=0.01, l2=0.01)
return deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
printable_module_name='regularizer')
@keras_export('keras.regularizers.get')
def get(identifier):
"""Retrieve a regularizer instance from a config or identifier."""
if identifier is None:
return None
if isinstance(identifier, dict):
return deserialize(identifier)
elif isinstance(identifier, six.string_types):
return deserialize(str(identifier))
elif callable(identifier):
return identifier
else:
raise ValueError(
'Could not interpret regularizer identifier: {}'.format(identifier))<|fim▁end|> | The L2 regularization penalty is computed as
`loss = l2 * reduce_sum(square(x))`
|
<|file_name|>dictio.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#Covered by GPL V2.0
from encoders import *
from payloads import *<|fim▁hole|> def __init__(self,dicc=None):
if dicc:
self.__payload=dicc.getpayload()
self.__encoder=dicc.getencoder()
else:
self.__payload=payload()
self.__encoder = [lambda x: encoder().encode(x)]
self.restart()
def count (self):
return self.__payload.count() * len(self.__encoder)
def setpayload(self,payl):
self.__payload = payl
self.restart()
def setencoder(self,encd):
self.__encoder=encd
self.generator = self.gen()
def getpayload (self):
return self.__payload
def getencoder (self):
return self.__encoder
def generate_all(self):
dicc=[]
for i in self.__payload:
# __encoder holds encoder callables (see gen()); apply each one to the payload
for encode in self.__encoder:
dicc.append(encode(i))
return dicc
def __iter__(self):
self.restart()
return self
def gen(self):
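# Lazily yield every payload value passed through every configured encoder callable.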
while 1:
pl=self.iter.next()
for encode in self.__encoder:
yield encode(pl)
def next(self):
return self.generator.next()
def restart(self):
self.iter=self.__payload.__iter__()
self.generator = self.gen()<|fim▁end|> |
# generate_dictio evolution
class dictionary: |
<|file_name|>captcha.py<|end_file_name|><|fim▁begin|>import os, requests, tempfile, time, webbrowser
import lacuna.bc
import lacuna.exceptions as err
### Dev notes:
### The tempfile containing the captcha image is not deleted until solveit()
### has been called.
###
### Allowing the tempfile to delete itself (delete=True during tempfile
### creation), or using the tempfile in conjunction with a 'with:' expression,
### have both been attempted.
###
### The problem is that, when using those auto-deletion methods, the tempfile
### is occasionally being removed from the system before the image viewer
### we're firing off actually gets a chance to read it. Everything is
### happening in the right order, it's just that the image viewer startup is
### too slow.
###
### Deleting the tempfile manually in solveit() works - don't decide to get
### clever and replace the unlink() in solveit() with some form of tempfile
### autodeletion without a lot of testing.
class Captcha(lacuna.bc.LacunaObject):
""" Fetches, displays, and solves graphical captchas.
General usage will be::
cap = my_client.get_captcha()
cap.showit() # display the captcha image
cap.prompt_user() # ask the user for a solution
cap.solveit() # check the user's solution
"""
path = 'captcha'
@lacuna.bc.LacunaObject.call_returning_meth
def fetch( self, **kwargs ):
""" Fetches a captcha for the user to solve from the server.
This mirrors the TLE API, but you generally don't need to call this.
Returns a :class:`lacuna.captcha.Puzzle` object.
"""
return Puzzle( self.client, kwargs['rslt'] )
def showit( self ):
""" Actually downloads the captcha image, and attempts to display it
to the user in one of several browsers.
If :meth:`fetch` is called first, :meth:`showit` uses that fetched data, but
this is not necessary. :meth:`showit` will call fetch for you.
Raises :class:`lacuna.exceptions.RequestError` if the image is not
fetchable (network error or the TLE servers have gone down).
Raises EnvironmentError if it cannot find an image viewer to use to
display the captcha image.
"""
if not hasattr(self,'url') or not hasattr(self,'guid'):
puzzle = self.fetch()
self.url = puzzle.url
self.guid = puzzle.guid
img_resp = requests.get( self.url )
if img_resp.status_code != 200:
raise err.RequestError("The captcha image URL is not responding.")
f = tempfile.NamedTemporaryFile( suffix='.png', prefix='tle_capcha_', delete=False );
self.tempfile = f.name
f.write( img_resp.content )
if hasattr(img_resp, 'connection'):
img_resp.connection.close()
local_url = 'file://' + f.name
found_browser = False
for b in [ None, 'windows-default', 'macosx', 'safari', 'firefox',
'google-chrome', 'chrome', 'chromium-browser', 'chromium' ]:
try:
browser = webbrowser.get( b )
browser.open( local_url, 0, True )
found_browser = True
break
except webbrowser.Error as e:
pass
if not found_browser:
raise EnvironmentError("Unable to find a browser to show the captcha image. Captcha solution is required.")
def prompt_user(self):
""" Prompts the user to solve the displayed captcha.
It's not illegal to call this without first calling :meth:`solveit`,
but doing so makes no sense.
"""
self.resp = input("Enter the solution to the captcha here: ")
return self.resp
def solveit(self):
""" Sends the user's response to the server to check for accuracy.
Returns True if the user's response was correct. Raises
:class:`lacuna.exceptions.CaptchaResponseError` otherwise.
"""
if not hasattr(self,'resp'):
raise AttributeError("You must prompt the user for a response before calling solveit().")
try:
self.solve( self.guid, self.resp )
except err.ServerError as e:
raise err.CaptchaResponseError("Incorrect captcha response")
finally:
delattr( self, 'url' )<|fim▁hole|> if os.path.isfile(self.tempfile):
os.unlink( self.tempfile )
return True
@lacuna.bc.LacunaObject.call_member_meth
def solve( self, guid:str, solution:str, **kwargs ):
""" Mirrors the TLE Captcha module's :meth:`solve` method, but unless you
really need this and you really know why, use :meth:`solveit` instead.
"""
pass
class Puzzle(lacuna.bc.SubClass):
"""
Object Attributes::
url FQ URL to the puzzle image
guid uuid attached to the puzzle; must be passed back along with
the solution.
"""<|fim▁end|> | delattr( self, 'guid' )
delattr( self, 'resp' ) |
<|file_name|>test_update_loyalty_program.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime<|fim▁hole|>from talon_one.rest import ApiException
class TestUpdateLoyaltyProgram(unittest.TestCase):
"""UpdateLoyaltyProgram unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test UpdateLoyaltyProgram
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = talon_one.models.update_loyalty_program.UpdateLoyaltyProgram() # noqa: E501
if include_optional :
return UpdateLoyaltyProgram(
title = '0',
description = '0',
subscribed_applications = [
56
],
default_validity = '0',
default_pending = '0',
allow_subledger = True
)
else :
return UpdateLoyaltyProgram(
)
def testUpdateLoyaltyProgram(self):
"""Test UpdateLoyaltyProgram"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()<|fim▁end|> |
import talon_one
from talon_one.models.update_loyalty_program import UpdateLoyaltyProgram # noqa: E501 |
<|file_name|>job.py<|end_file_name|><|fim▁begin|>from disco.core import Disco, result_iterator
from disco.settings import DiscoSettings
from disco.func import chain_reader
from discodex.objects import DataSet
from freequery.document import docparse
from freequery.document.docset import Docset
from freequery.index.tf_idf import TfIdf
class IndexJob(object):
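"""Run a Disco map/reduce TF-IDF job over the docset, then feed the partitioned
results into a discodex inverted index (see start())."""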
def __init__(self, spec, discodex,
disco_addr="disco://localhost", profile=False):
# TODO(sqs): refactoring potential with PagerankJob
self.spec = spec
self.discodex = discodex
self.docset = Docset(spec.docset_name)
self.disco = Disco(DiscoSettings()['DISCO_MASTER'])
self.nr_partitions = 8
self.profile = profile
def start(self):
results = self.__run_job(self.__index_job())
self.__run_discodex_index(results)
def __run_job(self, job):
results = job.wait()
if self.profile:
self.__profile_job(job)
return results
def __index_job(self):
return self.disco.new_job(
name="index_tfidf",
input=['tag://' + self.docset.ddfs_tag],
map_reader=docparse,
map=TfIdf.map,
reduce=TfIdf.reduce,
sort=True,
partitions=self.nr_partitions,
partition=TfIdf.partition,
merge_partitions=False,
profile=self.profile,
params=dict(doc_count=self.docset.doc_count))<|fim▁hole|> 'demuxer': 'freequery.index.tf_idf.TfIdf_demux',
'nr_ichunks': 1, # TODO(sqs): after disco#181 fixed, increase this
}
ds = DataSet(input=results, options=opts)
origname = self.discodex.index(ds)
self.disco.wait(origname) # origname is also the disco job name
self.discodex.clone(origname, self.spec.invindex_name)<|fim▁end|> |
def __run_discodex_index(self, results):
opts = {
'parser': 'disco.func.chain_reader', |
<|file_name|>ContextImpl.java<|end_file_name|><|fim▁begin|>/*
************************************************************************
******************* CANADIAN ASTRONOMY DATA CENTRE *******************
************** CENTRE CANADIEN DE DONNÉES ASTRONOMIQUES **************
*
* (c) 2011. (c) 2011.
* Government of Canada Gouvernement du Canada
* National Research Council Conseil national de recherches
* Ottawa, Canada, K1A 0R6 Ottawa, Canada, K1A 0R6
* All rights reserved Tous droits réservés
*
* NRC disclaims any warranties, Le CNRC dénie toute garantie
* expressed, implied, or énoncée, implicite ou légale,
* statutory, of any kind with de quelque nature que ce
* respect to the software, soit, concernant le logiciel,
* including without limitation y compris sans restriction
* any warranty of merchantability toute garantie de valeur
* or fitness for a particular marchande ou de pertinence
* purpose. NRC shall not be pour un usage particulier.
* liable in any event for any Le CNRC ne pourra en aucun cas
* damages, whether direct or être tenu responsable de tout
* indirect, special or general, dommage, direct ou indirect,
* consequential or incidental, particulier ou général,
* arising from the use of the accessoire ou fortuit, résultant
* software. Neither the name de l'utilisation du logiciel. Ni
* of the National Research le nom du Conseil National de
* Council of Canada nor the Recherches du Canada ni les noms
* names of its contributors may de ses participants ne peuvent
* be used to endorse or promote être utilisés pour approuver ou
* products derived from this promouvoir les produits dérivés
* software without specific prior de ce logiciel sans autorisation
* written permission. préalable et particulière
* par écrit.
*
* This file is part of the Ce fichier fait partie du projet
* OpenCADC project. OpenCADC.
*
* OpenCADC is free software: OpenCADC est un logiciel libre ;
* you can redistribute it and/or vous pouvez le redistribuer ou le
* modify it under the terms of modifier suivant les termes de
* the GNU Affero General Public la “GNU Affero General Public
* License as published by the License” telle que publiée
* Free Software Foundation, par la Free Software Foundation
* either version 3 of the : soit la version 3 de cette
* License, or (at your option) licence, soit (à votre gré)
* any later version. toute version ultérieure.
*
* OpenCADC is distributed in the OpenCADC est distribué
* hope that it will be useful, dans l’espoir qu’il vous
* but WITHOUT ANY WARRANTY; sera utile, mais SANS AUCUNE
* without even the implied GARANTIE : sans même la garantie
* warranty of MERCHANTABILITY implicite de COMMERCIALISABILITÉ
* or FITNESS FOR A PARTICULAR ni d’ADÉQUATION À UN OBJECTIF
* PURPOSE. See the GNU Affero PARTICULIER. Consultez la Licence
* General Public License for Générale Publique GNU Affero
* more details. pour plus de détails.
*
* You should have received Vous devriez avoir reçu une
* a copy of the GNU Affero copie de la Licence Générale
* General Public License along Publique GNU Affero avec
* with OpenCADC. If not, see OpenCADC ; si ce n’est
* <http://www.gnu.org/licenses/>. pas le cas, consultez :
* <http://www.gnu.org/licenses/>.
*
* $Revision: 5 $
*
************************************************************************
*/
package ca.nrc.cadc.ac.admin;
import javax.naming.Binding;
import javax.naming.Context;
import javax.naming.Name;
import javax.naming.NameClassPair;
import javax.naming.NameParser;
import javax.naming.NamingEnumeration;
import javax.naming.NamingException;
import java.util.Hashtable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* A Simple JNDI context.
*/
public class ContextImpl implements Context
{
private final static ConcurrentMap<String,Object> POOL_MAP =
new ConcurrentHashMap<>(1);
@Override
public Object lookup(String name) throws NamingException
{
return POOL_MAP.get(name);
}
@Override
public void bind(String name, Object value) throws NamingException
{
POOL_MAP.put(name, value);
}
@Override
public Object addToEnvironment(String arg0, Object arg1)
throws NamingException
{
return null;
}
@Override
public void bind(Name arg0, Object arg1) throws NamingException
{
}
@Override
public void close() throws NamingException
{
}
@Override
public Name composeName(Name arg0, Name arg1) throws NamingException
{
return null;
}
@Override
public String composeName(String arg0, String arg1)
throws NamingException
{
return null;
}
@Override
public Context createSubcontext(Name arg0) throws NamingException
{
// TODO Auto-generated method stub
return null;
}
@Override
public Context createSubcontext(String arg0) throws NamingException
{
return null;
}
@Override<|fim▁hole|> public void destroySubcontext(Name arg0) throws NamingException
{
}
@Override
public void destroySubcontext(String arg0) throws NamingException
{
// TODO Auto-generated method stub
}
@Override
public Hashtable<?, ?> getEnvironment() throws NamingException
{
return null;
}
@Override
public String getNameInNamespace() throws NamingException
{
return null;
}
@Override
public NameParser getNameParser(Name arg0) throws NamingException
{
return null;
}
@Override
public NameParser getNameParser(String arg0) throws NamingException
{
return null;
}
@Override
public NamingEnumeration<NameClassPair> list(Name arg0)
throws NamingException
{
return null;
}
@Override
public NamingEnumeration<NameClassPair> list(String arg0)
throws NamingException
{
// TODO Auto-generated method stub
return null;
}
@Override
public NamingEnumeration<Binding> listBindings(Name arg0)
throws NamingException
{
return null;
}
@Override
public NamingEnumeration<Binding> listBindings(String arg0)
throws NamingException
{
return null;
}
@Override
public Object lookup(Name arg0) throws NamingException
{
// TODO Auto-generated method stub
return null;
}
@Override
public Object lookupLink(Name arg0) throws NamingException
{
return null;
}
@Override
public Object lookupLink(String arg0) throws NamingException
{
return null;
}
@Override
public void rebind(Name arg0, Object arg1) throws NamingException
{
}
@Override
public void rebind(String arg0, Object arg1) throws NamingException
{
}
@Override
public Object removeFromEnvironment(String arg0) throws NamingException
{
return null;
}
@Override
public void rename(Name arg0, Name arg1) throws NamingException
{
}
@Override
public void rename(String arg0, String arg1) throws NamingException
{
}
@Override
public void unbind(Name arg0) throws NamingException
{
}
@Override
public void unbind(String arg0) throws NamingException
{
}
}<|fim▁end|> | |
<|file_name|>Test_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
import os
import glob<|fim▁hole|>import cgi
import PrintPages_test as pt
address = cgi.escape(os.environ["REMOTE_ADDR"])
script = "Main Model Form"
pt.write_log_entry(script, address)
pt.print_header('GrowChinook', 'Std')
pt.print_full_form(None, None, 'in', 'RunModel.py')
extension = 'csv'
os.chdir('uploads')
result = [i for i in glob.glob('*.csv')]
print('''
{}
</div>
</body>
'''.format(result))
print ('</html>')<|fim▁end|> | |
<|file_name|>parsetwo.py<|end_file_name|><|fim▁begin|>import sys
import os
def convbytes(bstr):
return str(buildint(bstr))
def buildint(bvals):
return int.from_bytes(bvals, byteorder='little')
def buildstr(bvals):
print(str(bvals))
#return str(bvals)
return bvals.decode("utf-8")
def getOutFile(infilename, outfiledir):
lastslashind = infilename.rfind("/")
ifname = infilename[lastslashind+1:]
ifname = outfiledir+"parsed_"+ifname
return ifname
infiledir = sys.argv[1]
outfiledir = sys.argv[2]
statenames = ["main", "doencode", "cksum", "compare_files", "treat_file", "make_ofname"]
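# Each dump file is a sequence of little-endian length-prefixed records: a state name,
# a pair count, then (name, type, data) triples until the next state name appears.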
for infilename in os.listdir(infiledir):
outfilename = getOutFile(infilename, outfiledir)
infilename = infiledir + infilename
with open(infilename, 'rb') as infile:
with open(outfilename, 'w') as ofile:
bytes_read = infile.read(4)
while bytes_read:
ival = buildint(bytes_read)
print ("read state name size int:"+str(ival))
bytes_read = infile.read(ival)
sval = buildstr(bytes_read)
print("sval:"+sval)
print("len sval:"+str(len(sval)))
while bytes_read:
print ("read bytes:"+sval)
ofile.write("DROPSTATE:"+sval)
ofile.write("\n")
bytes_read = infile.read(4)
ival = buildint(bytes_read)
print("read paircount:"+str(ival))
while bytes_read:
bytes_read = infile.read(4)
lival = buildint(bytes_read)
print("read size of name:"+str(lival))
bytes_read = infile.read(lival)
sval = buildstr(bytes_read)
print("read name:"+sval)
if(sval.startswith(tuple(statenames))):
print("BREAK!")
break
ofile.write(sval)
ofile.write(",")
bytes_read = infile.read(4)<|fim▁hole|> bytes_read = infile.read(lival)
sval = buildstr(bytes_read)
print("read type:"+sval)
ofile.write(sval)
ofile.write(",")
bytes_read = infile.read(4)
lival = buildint(bytes_read)
print("read size of data:"+str(lival))
bytes_read = infile.read(lival)
print("read data:"+str(bytes_read))
ofile.write(convbytes(bytes_read))
ofile.write("\n")<|fim▁end|> | lival = buildint(bytes_read)
print("read size of type:"+str(lival)) |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use ffi::{SystemError, OPERATION_CANCELED};
use reactor::Reactor;
use core::{AsIoContext, IoContext, Perform, ThreadIoContext};
use std::cmp::Ordering;
use std::ops::{Deref, DerefMut};
use std::sync::Mutex;
use std::time::{Duration, Instant, SystemTime};
use libc::timespec;
#[cfg(not(target_os = "linux"))]
mod nolinux;
#[cfg(not(target_os = "linux"))]
use self::nolinux::TimerCtl;
#[cfg(target_os = "linux")]
mod linux;
#[cfg(target_os = "linux")]
use self::linux::TimerFd as TimerCtl;
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Debug)]
pub struct Expiry(Duration);
impl Expiry {
pub fn zero() -> Self {
Expiry(Duration::new(0, 0))
}
pub fn now() -> Self {
Instant::now().into()
}
fn diff(&self, other: Self) -> usize {
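// Saturating difference self - other in nanoseconds; returns 0 when `other` is at or after `self`.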
let sec_cmp = self.0.as_secs().cmp(&other.0.as_secs());
let nsec_cmp = self.0.subsec_nanos().cmp(&other.0.subsec_nanos());
match (sec_cmp, nsec_cmp) {
(Ordering::Equal, Ordering::Greater) => {
(self.0.subsec_nanos() - other.0.subsec_nanos()) as usize
}
(Ordering::Greater, Ordering::Less) => {
(self.0.as_secs() - other.0.as_secs()) as usize * 1_000_000_000 -
(other.0.subsec_nanos() - self.0.subsec_nanos()) as usize
}
(Ordering::Greater, Ordering::Equal) => {
(self.0.as_secs() - other.0.as_secs()) as usize * 1_000_000_000
}
(Ordering::Greater, Ordering::Greater) => {
(self.0.as_secs() - other.0.as_secs()) as usize * 1_000_000_000 +
(self.0.subsec_nanos() - other.0.subsec_nanos()) as usize
}<|fim▁hole|> pub fn left(&self) -> usize {
self.diff(Expiry::now())
}
pub fn abs_time(&self) -> timespec {
timespec {
tv_sec: self.0.as_secs() as i64,
tv_nsec: self.0.subsec_nanos() as i64,
}
}
}
impl From<Instant> for Expiry {
fn from(t: Instant) -> Self {
use std::mem;
Expiry(t.duration_since(unsafe { mem::zeroed() }))
}
}
impl From<SystemTime> for Expiry {
fn from(t: SystemTime) -> Self {
match t.duration_since(SystemTime::now()) {
Ok(t) => Expiry(Expiry::now().0 + t),
Err(_) => Expiry::now(),
}
}
}
pub struct TimerImpl {
ctx: IoContext,
expiry: Expiry,
op: Option<Box<Perform>>,
}
impl TimerImpl {
pub fn new(ctx: &IoContext) -> Box<Self> {
Box::new(TimerImpl {
ctx: ctx.clone(),
expiry: Expiry::zero(),
op: None,
})
}
pub fn set_wait_op(&self, this: &mut ThreadIoContext, op: Box<Perform>) {
if let Some(op) = self.ctx.as_reactor().tq.insert(self, op) {
this.push(op, OPERATION_CANCELED)
}
}
pub fn reset_expiry(&self, expiry: Expiry) {
if let Some(op) = self.ctx.as_reactor().tq.erase(self, expiry) {
self.ctx.do_dispatch((op, OPERATION_CANCELED))
}
}
pub fn cancel(&self) {
if let Some(op) = self.ctx.as_reactor().tq.erase(self, Expiry::zero()) {
self.ctx.do_dispatch((op, OPERATION_CANCELED))
}
}
}
unsafe impl AsIoContext for TimerImpl {
fn as_ctx(&self) -> &IoContext {
if let Some(this) = ThreadIoContext::callstack(&self.ctx) {
this.as_ctx()
} else {
&self.ctx
}
}
}
#[derive(Clone)]
struct TimerImplRef(*const TimerImpl);
impl Deref for TimerImplRef {
type Target = TimerImpl;
fn deref(&self) -> &Self::Target {
unsafe { &*self.0 }
}
}
impl DerefMut for TimerImplRef {
fn deref_mut(&mut self) -> &mut Self::Target {
unsafe { &mut *(self.0 as *mut TimerImpl) }
}
}
impl PartialEq for TimerImplRef {
fn eq(&self, other: &Self) -> bool {
self.0.eq(&other.0)
}
}
impl Eq for TimerImplRef {}
impl PartialOrd for TimerImplRef {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
match unsafe { &*self.0 }.expiry.partial_cmp(&unsafe { &*other.0 }.expiry) {
Some(Ordering::Equal) => self.0.partial_cmp(&other.0),
cmp => cmp,
}
}
}
impl Ord for TimerImplRef {
fn cmp(&self, other: &Self) -> Ordering {
match unsafe { &*self.0 }.expiry.cmp(&unsafe { &*other.0 }.expiry) {
Ordering::Equal => self.0.cmp(&other.0),
cmp => cmp,
}
}
}
pub struct TimerQueue {
mutex: Mutex<Vec<TimerImplRef>>,
ctl: TimerCtl,
}
impl TimerQueue {
pub fn new() -> Result<Self, SystemError> {
Ok(TimerQueue {
mutex: Mutex::default(),
ctl: try!(TimerCtl::new()),
})
}
pub fn startup(&self, reactor: &Reactor) {
self.ctl.startup(reactor)
}
pub fn cleanup(&self, reactor: &Reactor) {
self.ctl.cleanup(reactor)
}
pub fn wait_duration(&self, max: usize) -> usize {
self.ctl.wait_duration(max)
}
pub fn get_ready_timers(&self, this: &mut ThreadIoContext) {
let mut tq = self.mutex.lock().unwrap();
let i = match tq.binary_search_by(|e| e.expiry.cmp(&Expiry::now())) {
Ok(i) => i + 1,
Err(i) => i,
};
for mut e in tq.drain(..i) {
this.push(e.op.take().unwrap(), SystemError::default());
}
}
pub fn insert(&self, timer: &TimerImpl, op: Box<Perform>) -> Option<Box<Perform>> {
let mut tq = self.mutex.lock().unwrap();
let mut timer = TimerImplRef(timer);
let old_op = timer.op.take();
timer.op = Some(op);
let i = tq.binary_search(&timer).unwrap_err();
tq.insert(i, timer.clone());
if i == 0 {
self.ctl.reset_timeout(&timer);
}
old_op
}
pub fn erase(&self, timer: &TimerImpl, expiry: Expiry) -> Option<Box<Perform>> {
let mut tq = self.mutex.lock().unwrap();
let mut timer = TimerImplRef(timer);
let old_op = timer.op.take();
if let Ok(i) = tq.binary_search(&timer) {
tq.remove(i);
for timer in tq.first().iter() {
self.ctl.reset_timeout(&timer);
}
}
timer.expiry = expiry;
old_op
}
}
#[test]
fn test_expiry_diff() {
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(2, 0))),
0
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(2, 1))),
0
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(2, 2))),
0
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(1, 2))),
0
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(1, 1))),
0
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(1, 0))),
1
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(0, 0))),
1_000_000_001
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(0, 1))),
1_000_000_000
);
assert_eq!(
Expiry(Duration::new(1, 1)).diff(Expiry(Duration::new(0, 2))),
999_999_999
);
}
#[test]
fn test_eq() {
use std::time::Instant;
let now = Instant::now();
let ctx = &IoContext::new().unwrap();
let t1 = TimerImpl {
ctx: ctx.clone(),
expiry: now.into(),
op: None,
};
let t2 = TimerImpl {
ctx: ctx.clone(),
expiry: now.into(),
op: None,
};
assert!(TimerImplRef(&t1) == TimerImplRef(&t1));
assert!(TimerImplRef(&t1) != TimerImplRef(&t2));
}
#[test]
fn test_cmp() {
use std::time::{Duration, Instant};
let now = Instant::now();
let ctx = &IoContext::new().unwrap();
let t1 = TimerImpl {
ctx: ctx.clone(),
expiry: (now + Duration::new(1, 0)).into(),
op: None,
};
let t2 = TimerImpl {
ctx: ctx.clone(),
expiry: (now + Duration::new(2, 0)).into(),
op: None,
};
let t3 = TimerImpl {
ctx: ctx.clone(),
expiry: (now + Duration::new(2, 0)).into(),
op: None,
};
assert!(TimerImplRef(&t1) < TimerImplRef(&t2));
if (&t2 as *const _) < (&t3 as *const _) {
assert!(TimerImplRef(&t2) < TimerImplRef(&t3));
} else {
assert!(TimerImplRef(&t3) < TimerImplRef(&t2));
}
}<|fim▁end|> | _ => 0,
}
}
|
<|file_name|>build.js<|end_file_name|><|fim▁begin|>module.exports = require('./../make')({<|fim▁hole|><|fim▁end|> | build: true
}); |
<|file_name|>en-CA.ts<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
export default [
[
['mid', 'n', 'mor', 'aft', 'eve', 'night'],
['midnight', 'noon', 'in the morning', 'in the afternoon', 'in the evening', 'at night'], u
],
[
['mid', 'noon', 'mor', 'aft', 'eve', 'night'],
['midnight', 'noon', 'morning', 'afternoon', 'evening', 'night'], u<|fim▁hole|> ],
[
'00:00', '12:00', ['06:00', '12:00'], ['12:00', '18:00'], ['18:00', '21:00'],
['21:00', '06:00']
]
];<|fim▁end|> | |
<|file_name|>test_multiprocessing.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class TestMP:
def __init__(self,n):
self.n = n
@staticmethod
def worker(q):
"""worker function"""
# print('worker',*args)
# print("ppid= {} pid= {}".format(os.getppid(),os.getpid()))
q.put([1,'x',(os.getpid(),[])])
return
def main(self):
if __name__ == '__main__':
jobs = []
for i in range(self.n):
q = Queue()
p = Process(target=self.worker,args=(q,))
jobs.append((p,q))
p.start()
for i in range(self.n):
j=jobs.pop(0)
j[0].join()
msg = j[1].get()
print("job no {} ended, msg: {}".format(i,msg))
m=TestMP(10)
m.main()<|fim▁end|> | from multiprocessing import Process,Queue
import os |
<|file_name|>parse_annotations.py<|end_file_name|><|fim▁begin|># annotation parser that processes our hashtags
import re
def get_abstracts(filename):
output = []
abstract_buffer = []
notes_buffer = []
last_abstract_no = 0
last_pmid = 0
last_biviewid = 0
with open(filename, 'rb') as f:
for line in f: # fast forward to abstract 1
m = re.match("Abstract 1 of [1-9][0-9]*", line)
if m:
record_abstract=True
last_abstract_no = 1
break
for line in f:
m = re.match("Abstract ([1-9][0-9]*) of [1-9][0-9]*", line.strip())
if m:
record_abstract = True
output.append({"abstract": "\n".join(abstract_buffer),
"notes": notes_buffer,
"pmid": last_pmid,
"biviewid": last_biviewid,
"annotid": last_abstract_no})
abstract_buffer, notes_buffer = [], []
last_abstract_no = int(m.group(1))
continue
m = re.match("BiviewID ([0-9]+); PMID ([0-9]+)", line)
if m:
record_abstract = False
last_biviewid = int(m.group(1))
last_pmid = int(m.group(2))
continue
if line.strip():
if record_abstract:
abstract_buffer.append(line)
else:
notes_buffer.append(line)
else:<|fim▁hole|> return output
def main():
a = get_abstracts("data/drug_trials_in_cochrane_BCW.txt")
b = get_abstracts("data/drug_trials_in_cochrane_IJM.txt")
i = 128
print
print a[i]
print
print b[i]
if __name__ == '__main__':
main()<|fim▁end|> | output.append({"abstract": "\n".join(abstract_buffer),
"notes": notes_buffer})
|
<|file_name|>max_abs_scaler_example.py<|end_file_name|><|fim▁begin|>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with<|fim▁hole|># (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import MaxAbsScaler
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
spark = SparkSession\
.builder\
.appName("MaxAbsScalerExample")\
.getOrCreate()
# $example on$
dataFrame = spark.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
scaler = MaxAbsScaler(inputCol="features", outputCol="scaledFeatures")
# Compute summary statistics and generate MaxAbsScalerModel
scalerModel = scaler.fit(dataFrame)
# rescale each feature to range [-1, 1].
scaledData = scalerModel.transform(dataFrame)
scaledData.show()
# $example off$
spark.stop()<|fim▁end|> | # this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0 |
<|file_name|>s3aaa.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
""" Authentication, Authorization, Accouting
@requires: U{B{I{gluon}} <http://web2py.com>}
@copyright: (c) 2010-2012 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ["AuthS3",
"S3Permission",
"S3Audit",
"S3RoleManager",
"FaceBookAccount",
"GooglePlusAccount",
]
import datetime
import re
import time
import urllib
from urllib import urlencode
import urllib2
from gluon import *
from gluon.storage import Storage, Messages
from gluon.dal import Field, Row, Query, Set, Table, Expression
from gluon.sqlhtml import CheckboxesWidget, StringWidget
from gluon.tools import Auth, callback, addrow
from gluon.utils import web2py_uuid
from gluon.validators import IS_SLUG
from gluon.contrib import simplejson as json
from gluon.contrib.simplejson.ordered_dict import OrderedDict
from gluon.contrib.login_methods.oauth20_account import OAuthAccount
from s3method import S3Method
from s3validators import IS_ACL
from s3widgets import S3ACLWidget, CheckboxesWidgetS3
from s3utils import s3_mark_required
from s3fields import s3_uid, s3_timestamp, s3_deletion_status
DEFAULT = lambda: None
table_field = re.compile("[\w_]+\.[\w_]+")
DEBUG = False
if DEBUG:
import sys
print >> sys.stderr, "S3AAA: DEBUG MODE"
def _debug(m):
print >> sys.stderr, m
else:
_debug = lambda m: None
# =============================================================================
class AuthS3(Auth):
"""
S3 extensions of the gluon.tools.Auth class
- override:
define_tables()
login()
register()
profile()
verify_email()
requires_membership()
- add:
s3_has_role()
s3_has_permission()
s3_logged_in()
s3_accessible_query()
s3_impersonate()
s3_register() callback
s3_link_to_person()
s3_verify_email_onaccept()
s3_group_members()
s3_user_to_person()
s3_person_to_user()
person_id()
- language
- utc_offset
- organisation
- @ToDo: Facility
"""
# Configuration of UIDs for system roles
S3_SYSTEM_ROLES = Storage(ADMIN = "ADMIN",
AUTHENTICATED = "AUTHENTICATED",
ANONYMOUS = "ANONYMOUS",
EDITOR = "EDITOR",
MAP_ADMIN = "MAP_ADMIN")
def __init__(self):
""" Initialise parent class & make any necessary modifications """
Auth.__init__(self, current.db)
deployment_settings = current.deployment_settings
system_name = deployment_settings.get_system_name()
self.settings.lock_keys = False
self.settings.username_field = False
self.settings.lock_keys = True
self.messages.lock_keys = False
self.messages.registration_pending_approval = "Account registered, however registration is still pending approval - please wait until confirmation received."
self.messages.email_approver_failed = "Failed to send mail to Approver - see if you can notify them manually!"
self.messages.email_verification_failed = "Unable to send verification email - either your email is invalid or our email server is down"
        self.messages.email_sent = "Verification Email sent - please check your email to validate. If you do not receive this email please check your junk email or spam filters"
self.messages.email_verified = "Email verified - you can now login"
self.messages.welcome_email_subject = "Welcome to %(system_name)s" % \
dict(system_name=system_name)
self.messages.welcome_email = \
"Welcome to %(system_name)s - click on the link %(url)s to complete your profile" % \
dict(system_name = system_name,
url = deployment_settings.get_base_public_url() + URL("default", "user", args=["profile"]))
self.messages.duplicate_email = "This email address is already in use"
self.messages.registration_disabled = "Registration Disabled!"
self.messages.registration_verifying = "You haven't yet Verified your account - please check your email"
self.messages.label_organisation_id = "Organization"
self.messages.label_site_id = "Facility"
self.messages.label_utc_offset = "UTC Offset"
self.messages.label_image = "Profile Image"
self.messages.help_utc_offset = "The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones."
self.messages.help_mobile_phone = "Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages."
self.messages.help_organisation = "Entering an Organization is optional, but doing so directs you to the appropriate approver & means you automatically get the appropriate permissions."
self.messages.help_image = "You can either use %(gravatar)s or else upload a picture here. The picture will be resized to 50x50."
#self.messages.logged_in = "Signed In"
#self.messages.submit_button = "Signed In"
#self.messages.logged_out = "Signed Out"
self.messages.lock_keys = True
# S3Permission
self.permission = S3Permission(self)
# Set to True to override any authorization
self.override = False
# Site types (for OrgAuth)
T = current.T
if deployment_settings.get_ui_camp():
shelter = T("Camp")
else:
shelter = T("Shelter")
self.org_site_types = Storage(
cr_shelter = shelter,
#org_facility = T("Facility"),
org_facility = T("Site"),
org_office = T("Office"),
hms_hospital = T("Hospital"),
#project_site = T("Project Site"),
#fire_station = T("Fire Station"),
)
# -------------------------------------------------------------------------
def define_tables(self, migrate=True, fake_migrate=False):
"""
to be called unless tables are defined manually
usages::
# defines all needed tables and table files
# UUID + "_auth_user.table", ...
auth.define_tables()
# defines all needed tables and table files
# "myprefix_auth_user.table", ...
auth.define_tables(migrate="myprefix_")
# defines all needed tables without migration/table files
auth.define_tables(migrate=False)
"""
db = current.db
request = current.request
session = current.session
settings = self.settings
messages = self.messages
# User table
if not settings.table_user:
passfield = settings.password_field
if settings.username_field:
# with username (not used by default in Sahana)
settings.table_user = db.define_table(
settings.table_user_name,
Field("first_name", length=128, default="",
label=messages.label_first_name),
Field("last_name", length=128, default="",
label=messages.label_last_name),
Field("username", length=128, default="",
unique=True),
Field(passfield, "password", length=512,
readable=False, label=messages.label_password),
Field("email", length=512, default="",
label=messages.label_email),
Field("language", length=16),
Field("utc_offset", length=16,
readable=False, writable=False),
Field("organisation_id", "integer",
writable=False,
label=messages.label_organisation_id),
Field("site_id", "integer",
writable=False,
label=messages.label_site_id),
Field("registration_key", length=512,
writable=False, readable=False, default="",
label=messages.label_registration_key),
Field("reset_password_key", length=512,
writable=False, readable=False, default="",
label=messages.label_registration_key),
Field("deleted", "boolean", writable=False,
readable=False, default=False),
Field("timestmp", "datetime", writable=False,
readable=False, default=""),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()))
else:
# with email-address (Sahana default)
settings.table_user = db.define_table(
settings.table_user_name,
Field("first_name", length=128, default="",
label=messages.label_first_name),
Field("last_name", length=128, default="",
label=messages.label_last_name),
Field("email", length=512, default="",
label=messages.label_email,
unique=True),
Field(passfield, "password", length=512,
readable=False, label=messages.label_password),
Field("language", length=16),
Field("utc_offset", length=16,
readable=False,
writable=False,
label=messages.label_utc_offset),
Field("organisation_id", "integer",
writable=False,
label=messages.label_organisation_id),
Field("site_id", "integer",
writable=False,
label=messages.label_site_id),
Field("registration_key", length=512,
writable=False, readable=False, default="",
label=messages.label_registration_key),
Field("reset_password_key", length=512,
writable=False, readable=False, default="",
label=messages.label_registration_key),
Field("deleted", "boolean", writable=False,
readable=False, default=False),
Field("timestmp", "datetime", writable=False,
readable=False, default=""),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()))
table = settings.table_user
table.first_name.notnull = True
table.first_name.requires = \
IS_NOT_EMPTY(error_message=messages.is_empty)
if current.deployment_settings.get_L10n_mandatory_lastname():
table.last_name.notnull = True
table.last_name.requires = \
IS_NOT_EMPTY(error_message=messages.is_empty)
table.utc_offset.comment = A(SPAN("[Help]"),
_class="tooltip",
_title="%s|%s" % (messages.label_utc_offset,
messages.help_utc_offset))
try:
from s3validators import IS_UTC_OFFSET
table.utc_offset.requires = IS_EMPTY_OR(IS_UTC_OFFSET())
except:
pass
table[passfield].requires = [CRYPT(key=settings.hmac_key,
min_length=self.settings.password_min_length,
digest_alg="sha512")]
if settings.username_field:
table.username.requires = IS_NOT_IN_DB(db,
"%s.username" % settings.table_user._tablename)
table.email.requires = \
[IS_EMAIL(error_message=messages.invalid_email),
IS_LOWER(),
IS_NOT_IN_DB(db,
"%s.email" % settings.table_user._tablename,
error_message=messages.duplicate_email)]
table.registration_key.default = ""
# Group table (roles)
if not settings.table_group:
settings.table_group = db.define_table(
settings.table_group_name,
# Group unique ID, must be notnull+unique:
Field("uuid",
length=64,
notnull=True,
unique=True,
readable=False,
writable=False),
# Group does not appear in the Role Manager:
# (can neither assign, nor modify, nor delete)
Field("hidden", "boolean",
readable=False,
writable=False,
default=False),
# Group cannot be modified in the Role Manager:
# (can assign, but neither modify nor delete)
Field("system", "boolean",
readable=False,
writable=False,
default=False),
# Group cannot be deleted in the Role Manager:
# (can assign and modify, but not delete)
Field("protected", "boolean",
readable=False,
writable=False,
default=False),
# Role name:
Field("role",
length=512,
default="",
unique=True,
label=messages.label_role),
Field("description", "text",
label=messages.label_description),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_timestamp()+s3_deletion_status()))
table = settings.table_group
table.role.requires = IS_NOT_IN_DB(db, "%s.role"
% settings.table_group._tablename)
# Group membership table (user<->role)
if not settings.table_membership:
settings.table_membership = db.define_table(
settings.table_membership_name,
Field("user_id", settings.table_user,
label=messages.label_user_id),
Field("group_id", settings.table_group,
label=messages.label_group_id),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()+s3_deletion_status()))
table = settings.table_membership
table.user_id.requires = IS_IN_DB(db, "%s.id" %
settings.table_user._tablename,
"%(id)s: %(first_name)s %(last_name)s")
table.group_id.requires = IS_IN_DB(db, "%s.id" %
settings.table_group._tablename,
"%(id)s: %(role)s")
security_policy = current.deployment_settings.get_security_policy()
# Define Eden permission table
self.permission.define_table(migrate=migrate,
fake_migrate=fake_migrate)
if security_policy not in (1, 2, 3, 4, 5, 6) and \
not settings.table_permission:
# Permissions table (group<->permission)
# NB This Web2Py table is deprecated / replaced in Eden by S3Permission
settings.table_permission = db.define_table(
settings.table_permission_name,
Field("group_id", settings.table_group,
label=messages.label_group_id),
Field("name", default="default", length=512,
label=messages.label_name),
Field("table_name", length=512,
label=messages.label_table_name),
Field("record_id", "integer",
label=messages.label_record_id),
migrate = migrate,
fake_migrate=fake_migrate)
table = settings.table_permission
table.group_id.requires = IS_IN_DB(db, "%s.id" %
settings.table_group._tablename,
"%(id)s: %(role)s")
table.name.requires = IS_NOT_EMPTY()
table.table_name.requires = IS_IN_SET(db.tables)
table.record_id.requires = IS_INT_IN_RANGE(0, 10 ** 9)
# Event table (auth log)
# Records Logins & ?
# @ToDo: Deprecate? At least make it configurable?
if not settings.table_event:
settings.table_event = db.define_table(
settings.table_event_name,
Field("time_stamp", "datetime",
default=request.now,
label=messages.label_time_stamp),
Field("client_ip",
default=request.client,
label=messages.label_client_ip),
Field("user_id", settings.table_user, default=None,
requires = IS_IN_DB(db, "%s.id" %
settings.table_user._tablename,
"%(id)s: %(first_name)s %(last_name)s"),
label=messages.label_user_id),
Field("origin", default="auth", length=512,
label=messages.label_origin,
requires = IS_NOT_EMPTY()),
Field("description", "text", default="",
label=messages.label_description,
requires = IS_NOT_EMPTY()),
migrate = migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()+s3_deletion_status()))
# -------------------------------------------------------------------------
def login_bare(self, username, password):
"""
Logs user in
- extended to understand session.s3.roles
"""
request = current.request
session = current.session
db = current.db
table_user = self.settings.table_user
table_membership = self.settings.table_membership
if self.settings.login_userfield:
userfield = self.settings.login_userfield
elif "username" in table_user.fields:
userfield = "username"
else:
userfield = "email"
passfield = self.settings.password_field
user = db(table_user[userfield] == username).select().first()
password = table_user[passfield].validate(password)[0]
if user:
user_id = user.id
if not user.registration_key and user[passfield] == password:
user = Storage(table_user._filter_fields(user, id=True))
session.auth = Storage(user=user,
last_visit=request.now,
expiration=self.settings.expiration)
self.user = user
self.set_roles()
return user
return False
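        # Illustrative usage (editor's sketch, not part of the original module;
        # the credentials shown are hypothetical):
        #     user = auth.login_bare("admin@example.com", "secret")
        #     if user:
        #         # session.auth and session.s3.roles are now populated
        #         pass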
# -------------------------------------------------------------------------
def set_roles(self):
"""
Update session roles and pe_id for the current user
"""
if self.user:
db = current.db
session = current.session
table_user = self.settings.table_user
table_membership = self.settings.table_membership
user_id = self.user.id
# Add the Roles to session.s3
roles = []
query = (table_membership.deleted != True) & \
(table_membership.user_id == user_id)
rows = db(query).select(table_membership.group_id)
session.s3.roles = [s.group_id for s in rows]
# Set pe_id for current user
ltable = current.s3db.pr_person_user
if ltable is not None:
query = (ltable.user_id == user_id)
row = db(query).select(ltable.pe_id, limitby=(0, 1)).first()
if row:
session.auth.user["pe_id"] = row.pe_id
return
# -------------------------------------------------------------------------
def set_cookie(self):
"""
Set a Cookie to the client browser so that we know this user has
registered & so we should present them with a login form instead
of a register form
"""
response = current.response
response.cookies["registered"] = "yes"
response.cookies["registered"]["expires"] = 365 * 24 * 3600 # 1 year
response.cookies["registered"]["path"] = "/"
# -------------------------------------------------------------------------
def login(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT):
"""
Overrides Web2Py's login() to use custom flash styles & utcnow
@returns: a login form
"""
db = current.db
table_user = self.settings.table_user
if self.settings.login_userfield:
username = self.settings.login_userfield
elif "username" in table_user.fields:
username = "username"
else:
username = "email"
old_requires = table_user[username].requires
table_user[username].requires = [IS_NOT_EMPTY(), IS_LOWER()]
request = current.request
response = current.response
session = current.session
passfield = self.settings.password_field
try:
table_user[passfield].requires[-1].min_length = 0
except:
pass
if next is DEFAULT:
next = request.vars._next or self.settings.login_next
if onvalidation is DEFAULT:
onvalidation = self.settings.login_onvalidation
if onaccept is DEFAULT:
onaccept = self.settings.login_onaccept
if log is DEFAULT:
log = self.messages.login_log
user = None # default
# Do we use our own login form, or from a central source?
if self.settings.login_form == self:
form = SQLFORM(
table_user,
fields=[username, passfield],
hidden=dict(_next=request.vars._next),
showid=self.settings.showid,
submit_button=self.messages.submit_button,
delete_label=self.messages.delete_label,
formstyle=self.settings.formstyle,
separator=self.settings.label_separator
)
if self.settings.remember_me_form:
# Add a new input checkbox "remember me for longer"
addrow(form,XML(" "),
DIV(XML(" "),
INPUT(_type='checkbox',
_class='checkbox',
_id="auth_user_remember",
_name="remember",
),
XML(" "),
LABEL(
self.messages.label_remember_me,
_for="auth_user_remember",
)),"",
self.settings.formstyle,
'auth_user_remember__row')
captcha = self.settings.login_captcha or \
(self.settings.login_captcha!=False and self.settings.captcha)
if captcha:
addrow(form, captcha.label, captcha, captcha.comment,
self.settings.formstyle,'captcha__row')
accepted_form = False
if form.accepts(request.vars, session,
formname="login", dbio=False,
onvalidation=onvalidation):
accepted_form = True
if username == "email":
# Check for Domains which can use Google's SMTP server for passwords
# @ToDo: an equivalent email_domains for other email providers
gmail_domains = current.deployment_settings.get_auth_gmail_domains()
if gmail_domains:
from gluon.contrib.login_methods.email_auth import email_auth
domain = form.vars[username].split("@")[1]
if domain in gmail_domains:
self.settings.login_methods.append(
email_auth("smtp.gmail.com:587", "@%s" % domain))
# Check for username in db
query = (table_user[username] == form.vars[username])
user = db(query).select().first()
if user:
# user in db, check if registration pending or disabled
temp_user = user
if temp_user.registration_key == "pending":
response.warning = self.messages.registration_pending
return form
elif temp_user.registration_key in ("disabled", "blocked"):
response.error = self.messages.login_disabled
return form
elif not temp_user.registration_key is None and \
temp_user.registration_key.strip():
response.warning = \
self.messages.registration_verifying
return form
# Try alternate logins 1st as these have the
# current version of the password
user = None
for login_method in self.settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in self.settings.login_methods:
# do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
# Alternates have failed, maybe because service inaccessible
if self.settings.login_methods[0] == self:
# Try logging in locally using cached credentials
if temp_user[passfield] == form.vars.get(passfield, ""):
# Success
user = temp_user
else:
# User not in db
if not self.settings.alternate_requires_registration:
# We're allowed to auto-register users from external systems
for login_method in self.settings.login_methods:
if login_method != self and \
login_method(request.vars[username],
request.vars[passfield]):
if not self in self.settings.login_methods:
# Do not store password in db
form.vars[passfield] = None
user = self.get_or_create_user(form.vars)
break
if not user:
self.log_event(self.settings.login_failed_log,
request.post_vars)
# Invalid login
session.error = self.messages.invalid_login
redirect(self.url(args=request.args,
vars=request.get_vars))
else:
# Use a central authentication server
cas = self.settings.login_form
cas_user = cas.get_user()
if cas_user:
cas_user[passfield] = None
user = self.get_or_create_user(table_user._filter_fields(cas_user))
form = Storage()
form.vars = user
self.s3_register(form)
elif hasattr(cas, "login_form"):
return cas.login_form()
else:
# we need to pass through login again before going on
next = "%s?_next=%s" % (URL(r=request), next)
redirect(cas.login_url(next))
# Process authenticated users
if user:
user = Storage(table_user._filter_fields(user, id=True))
# If the user hasn't set a personal UTC offset,
# then read the UTC offset from the form:
if not user.utc_offset:
user.utc_offset = session.s3.utc_offset
session.auth = Storage(
user=user,
last_visit=request.now,
expiration = request.vars.get("remember", False) and \
self.settings.long_expiration or self.settings.expiration,
remember = request.vars.has_key("remember"),
hmac_key = web2py_uuid()
)
self.user = user
self.set_roles()
# Read their language from the Profile
language = user.language
current.T.force(language)
session.s3.language = language
session.confirmation = self.messages.logged_in
# Set a Cookie to present user with login box by default
self.set_cookie()
# Update the timestamp of the User so we know when they last logged-in
db(table_user.id == self.user.id).update(timestmp = request.utcnow)
if log and self.user:
self.log_event(log % self.user)
# How to continue
if self.settings.login_form == self:
if accepted_form:
if onaccept:
onaccept(form)
if isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
if next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
table_user[username].requires = old_requires
return form
else:
redirect(next)
# -------------------------------------------------------------------------
def register(self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT):
"""
Overrides Web2Py's register() to add new functionality:
- Checks whether registration is permitted
- Custom Flash styles
- Allow form to be embedded in other pages
- Optional addition of Mobile Phone field to the Register form
- Optional addition of Organisation field to the Register form
- Lookup Domains/Organisations to check for Whitelists
&/or custom Approver
@returns: a registration form
"""
db = current.db
settings = self.settings
messages = self.messages
request = current.request
response = current.response
session = current.session
deployment_settings = current.deployment_settings
# S3: Don't allow registration if disabled
self_registration = deployment_settings.get_security_self_registration()
if not self_registration:
session.error = messages.registration_disabled
redirect(URL(args=["login"]))
if self.is_logged_in() and request.function != "index":
redirect(settings.logged_url)
if next == DEFAULT:
next = request.vars._next or settings.register_next
if onvalidation == DEFAULT:
onvalidation = settings.register_onvalidation
if onaccept == DEFAULT:
onaccept = settings.register_onaccept
if log == DEFAULT:
log = messages.register_log
user = settings.table_user
passfield = settings.password_field
# S3: Organisation field in form?
if deployment_settings.get_auth_registration_requests_organisation():
# Widget set in controllers/default.py
#user.organisation_id.widget =
user.organisation_id.writable = True
if deployment_settings.get_auth_registration_organisation_mandatory():
user.organisation_id.comment = SPAN("*", _class="req")
else:
user.organisation_id.comment = DIV(_class="tooltip",
_title="%s|%s" % (messages.label_organisation_id,
messages.help_organisation))
else:
user.organisation_id.readable = False
user.organisation_id.writable = False
user.organisation_id.default = deployment_settings.get_auth_registration_organisation_id_default()
# @ToDo: Option to request Facility during Registration
user.site_id.readable = False
labels, required = s3_mark_required(user)
#formstyle = current.manager.s3.crud.formstyle
form = SQLFORM(user, hidden=dict(_next=request.vars._next),
labels = labels,
separator = "",
showid=settings.showid,
submit_button=messages.submit_button,
delete_label=messages.delete_label,
#formstyle = formstyle
)
for i, row in enumerate(form[0].components):
item = row[1][0]
if isinstance(item, INPUT) and item["_name"] == passfield:
field_id = "%s_password_two" % user._tablename
#row = formstyle(...)
form[0].insert(i + 1, TR(
TD(LABEL("%s:" % messages.verify_password,
_for="password_two",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX),
_class="w2p_fl"),
INPUT(_name="password_two",
_id=field_id,
_type="password",
requires=IS_EXPR("value==%s" % \
repr(request.vars.get(passfield, None)),
error_message=messages.mismatched_password)),
SPAN("*", _class="req"),
"", _id=field_id + SQLFORM.ID_ROW_SUFFIX))
#form[0].insert(i + 1, row)
# add an opt in clause to receive emails depending on the deployment settings
if deployment_settings.get_auth_opt_in_to_email():
field_id = "%s_opt_in" % user._tablename
comment = DIV(DIV(_class="tooltip",
_title="%s|%s" % ("Mailing list",
"By selecting this you agree that we may contact you.")))
checked = deployment_settings.get_auth_opt_in_default() and "selected"
form[0].insert(-1,
TR(TD(LABEL("%s:" % "Receive updates",
_for="opt_in",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX),
_class="w2p_fl"),
INPUT(_name="opt_in", _id=field_id, _type="checkbox", _checked=checked),
TD(comment,
_class="w2p_fc"),
_id=field_id + SQLFORM.ID_ROW_SUFFIX))
# S3: Insert Mobile phone field into form
if deployment_settings.get_auth_registration_requests_mobile_phone():
field_id = "%s_mobile" % user._tablename
if deployment_settings.get_auth_registration_mobile_phone_mandatory():
comment = SPAN("*", _class="req")
else:
comment = DIV(_class="tooltip",
_title="%s|%s" % (deployment_settings.get_ui_label_mobile_phone(),
messages.help_mobile_phone))
form[0].insert(-1,
TR(TD(LABEL("%s:" % deployment_settings.get_ui_label_mobile_phone(),
_for="mobile",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX),
_class="w2p_fl"),
INPUT(_name="mobile", _id=field_id),
TD(comment,
_class="w2p_fc"),
_id=field_id + SQLFORM.ID_ROW_SUFFIX))
# S3: Insert Photo widget into form
if deployment_settings.get_auth_registration_requests_image():
label = self.messages.label_image
comment = DIV(_class="stickytip",
_title="%s|%s" % (label,
self.messages.help_image % \
dict(gravatar = A("Gravatar",
_target="top",
_href="http://gravatar.com"))))
field_id = "%s_image" % user._tablename
widget = SQLFORM.widgets["upload"].widget(current.s3db.pr_image.image, None)
form[0].insert(-1,
TR(TD(LABEL("%s:" % label,
_for="image",
_id=field_id + SQLFORM.ID_LABEL_SUFFIX),
_class="w2p_fl"),
widget,
TD(comment,
_class="w2p_fc"),
_id=field_id + SQLFORM.ID_ROW_SUFFIX))
if settings.captcha != None:
form[0].insert(-1, TR("", settings.captcha, ""))
import uuid<|fim▁hole|> user.registration_key.default = key = str(uuid.uuid4())
if form.accepts(request.vars, session, formname="register",
onvalidation=onvalidation):
if settings.create_user_groups:
# Not used in S3
description = \
"group uniquely assigned to %(first_name)s %(last_name)s"\
% form.vars
group_id = self.add_group("user_%s" % form.vars.id,
description)
self.add_membership(group_id, form.vars.id)
approved = False
users = db(settings.table_user.id > 0).count()
if users == 1:
# 1st user to register shouldn't need verification/approval
approved = True
elif settings.registration_requires_verification:
# Ensure that we add to the correct Organization
approver, organisation_id = self.s3_approver(form.vars)
if organisation_id:
# @ToDo: Is it correct to override the organisation entered by the user?
# Ideally (if the deployment_settings.auth.registration_requests_organisation = True
# the org could be selected based on the email and the user could then override
                    form.vars.organisation_id = organisation_id
# Send the Verification email
if not settings.mailer or \
not settings.mailer.send(to=form.vars.email,
subject=messages.verify_email_subject,
message=messages.verify_email % dict(key=key)):
db.rollback()
response.error = messages.email_verification_failed
return form
# @ToDo: Deployment Setting?
#session.confirmation = messages.email_sent
next = URL(c="default", f="message",
args = ["verify_email_sent"],
vars = {"email": form.vars.email})
elif settings.registration_requires_approval:
# Identify the Approver &
# ensure that we add to the correct Organization
approver, organisation_id = self.s3_approver(form.vars)
if organisation_id:
form.vars.organisation_id = organisation_id
if approver:
# Send the Authorisation email
form.vars.approver = approver
if not settings.mailer or \
not settings.verify_email_onaccept(form.vars):
# We don't wish to prevent registration if the approver mail fails to send
#db.rollback()
session.error = messages.email_approver_failed
#return form
user[form.vars.id] = dict(registration_key="pending")
session.warning = messages.registration_pending_approval
else:
# The domain is Whitelisted
approved = True
else:
# No verification or approval needed
approved = True
approver, organisation_id = self.s3_approver(form.vars)
if organisation_id:
                    form.vars.organisation_id = organisation_id
form.vars.registration_key = ""
form.vars.approver = approver
settings.verify_email_onaccept(form.vars)
# Set a Cookie to present user with login box by default
self.set_cookie()
if approved:
user[form.vars.id] = dict(registration_key="")
session.confirmation = messages.registration_successful
table_user = settings.table_user
if "username" in table_user.fields:
username = "username"
else:
username = "email"
query = (table_user[username] == form.vars[username])
user = db(query).select(limitby=(0, 1)).first()
user = Storage(table_user._filter_fields(user, id=True))
if users == 1:
# Add the first user to admin group
admin_group_id = 1
self.add_membership(admin_group_id, user.id)
# If the user hasn't set a personal UTC offset,
# then read the UTC offset from the form:
if not user.utc_offset:
user.utc_offset = session.s3.utc_offset
session.auth = Storage(user=user, last_visit=request.now,
expiration=settings.expiration)
self.user = user
session.flash = messages.logged_in
if log:
self.log_event(log % form.vars)
if onaccept:
onaccept(form)
if not next:
next = self.url(args = request.args)
elif isinstance(next, (list, tuple)):
# fix issue with 2.6
next = next[0]
elif next and not next[0] == "/" and next[:4] != "http":
next = self.url(next.replace("[id]", str(form.vars.id)))
redirect(next)
return form
# -------------------------------------------------------------------------
def profile(
self,
next=DEFAULT,
onvalidation=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT,
):
"""
returns a form that lets the user change his/her profile
.. method:: Auth.profile([next=DEFAULT [, onvalidation=DEFAULT
[, onaccept=DEFAULT [, log=DEFAULT]]]])
Patched for S3 to use s3_mark_required
"""
table_user = self.settings.table_user
if not self.is_logged_in():
redirect(self.settings.login_url)
passfield = self.settings.password_field
self.settings.table_user[passfield].writable = False
request = current.request
session = current.session
if next == DEFAULT:
next = request.get_vars._next \
or request.post_vars._next \
or self.settings.profile_next
if onvalidation == DEFAULT:
onvalidation = self.settings.profile_onvalidation
if onaccept == DEFAULT:
onaccept = self.settings.profile_onaccept
if log == DEFAULT:
log = self.messages.profile_log
labels, required = s3_mark_required(table_user)
form = SQLFORM(
table_user,
self.user.id,
fields = self.settings.profile_fields,
labels = labels,
hidden = dict(_next=next),
showid = self.settings.showid,
submit_button = self.messages.profile_save_button,
delete_label = self.messages.delete_label,
upload = self.settings.download_url,
formstyle = self.settings.formstyle,
separator=""
)
if form.accepts(request, session,
formname='profile',
onvalidation=onvalidation,hideerror=self.settings.hideerror):
self.user.update(table_user._filter_fields(form.vars))
session.flash = self.messages.profile_updated
if log:
self.log_event(log % self.user)
callback(onaccept,form)
if not next:
next = self.url(args=request.args)
elif isinstance(next, (list, tuple)): ### fix issue with 2.6
next = next[0]
elif next and not next[0] == '/' and next[:4] != 'http':
next = self.url(next.replace('[id]', str(form.vars.id)))
redirect(next)
return form
# -------------------------------------------------------------------------
def s3_lookup_org_role(self, organisation_id):
"""
Lookup the Organisation Access Role from the ID of the Organisation
"""
if not organisation_id:
return None
db = current.db
s3db = current.s3db
table = s3db.org_organisation
query = (table.id == organisation_id)
org = db(query).select(table.owned_by_organisation).first()
if org:
return org.owned_by_organisation
return None
# -------------------------------------------------------------------------
def s3_impersonate(self, user_id):
"""
S3 framework function
Designed to be used within tasks, which are run in a separate request
& hence don't have access to current.auth
@param user_id: auth.user.id
"""
session = current.session
db = current.db
if not user_id:
# Anonymous
return None
table_user = self.settings.table_user
user = db(table_user.id == user_id).select(limitby=(0, 1)).first()
if not user:
# Invalid user ID
return False
roles = []
table_membership = self.settings.table_membership
memberships = db(table_membership.user_id == user.id).select(
table_membership.group_id)
roles = [m.group_id for m in memberships]
if session.s3.system_roles.ANONYMOUS:
roles.append(session.s3.system_roles.ANONYMOUS)
session.s3.roles = roles
# Set the language from the Profile
language = user.language
current.T.force(language)
current.session.s3.language = language
user = Storage(table_user._filter_fields(user, id=True))
# Use this user
self.user = user
return user
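        # Illustrative usage from an async task (editor's sketch, not part of
        # the original module; "task_user_id" is a hypothetical argument):
        #     user = auth.s3_impersonate(task_user_id)  # loads roles into session.s3
        #     if user is False:
        #         raise ValueError("invalid user ID")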
# -------------------------------------------------------------------------
def s3_register(self, form):
"""
S3 framework function
Designed to be used as an onaccept callback for register()
Whenever someone registers, it:
- adds them to the 'Authenticated' role
- adds their name to the Person Registry
- creates their profile picture
- creates an HRM record
- adds them to the Org_x Access role
"""
db = current.db
manager = current.manager
s3db = current.s3db
vars = form.vars
user_id = vars.id
if not user_id:
return None
# Add to 'Authenticated' role
authenticated = self.id_group("Authenticated")
self.add_membership(authenticated, user_id)
# Link to organisation, lookup org role
organisation_id = self.s3_link_to_organisation(vars)
if organisation_id:
owned_by_organisation = self.s3_lookup_org_role(organisation_id)
else:
owned_by_organisation = None
# Add to Person Registry and Email/Mobile to pr_contact
person_id = self.s3_link_to_person(vars, # user
owned_by_organisation)
if "image" in vars:
if hasattr(vars.image, "file"):
source_file = vars.image.file
original_filename = vars.image.filename
ptable = s3db.pr_person
query = (ptable.id == person_id)
pe_id = db(query).select(ptable.pe_id,
limitby=(0, 1)).first()
if pe_id:
pe_id = pe_id.pe_id
itable = s3db.pr_image
field = itable.image
newfilename = field.store(source_file, original_filename, field.uploadfolder)
url = URL(c="default", f="download", args=newfilename)
fields = dict(pe_id=pe_id,
profile=True,
image=newfilename,
url = url,
title=current.T("Profile Picture"))
if isinstance(field.uploadfield, str):
fields[field.uploadfield] = source_file.read()
itable.insert(**fields)
htable = s3db.table("hrm_human_resource")
if htable and organisation_id:
# Create an HRM entry, if one doesn't already exist
query = (htable.person_id == person_id) & \
(htable.organisation_id == organisation_id)
row = db(query).select(htable.id, limitby=(0, 1)).first()
if not row:
if current.deployment_settings.get_hrm_show_staff():
type = 1 # Staff
else:
type = 2 # Volunteer
id = htable.insert(person_id=person_id,
organisation_id=organisation_id,
type=type,
owned_by_user=user_id,
owned_by_organisation=owned_by_organisation)
record = Storage(id=id)
manager.model.update_super(htable, record)
if owned_by_organisation:
# Add user to the Org Access Role
table = self.settings.table_membership
query = (table.deleted != True) & \
(table.user_id == user_id) & \
(table.group_id == owned_by_organisation)
if not db(query).select(table.id,
limitby=(0, 1)).first():
table.insert(user_id=user_id,
group_id=owned_by_organisation)
# Return person_id for init scripts
return person_id
# -------------------------------------------------------------------------
def s3_link_to_organisation(self, user):
"""
Link a user account to an organisation
@param user: the user account record (= form.vars in s3_register)
"""
db = current.db
s3db = current.s3db
manager = current.manager
organisation_id = user.organisation_id
if not organisation_id:
otable = s3db.org_organisation
name = user.get("organisation_name", None)
acronym = user.get("organisation_acronym", None)
if name:
# Create new organisation
organisation_id = otable.insert(name=name,
acronym=acronym)
# Update the super-entities
record = Storage(id=organisation_id)
manager.model.update_super(otable, record)
# Set record ownership
self.s3_set_record_owner(otable, organisation_id)
user.organisation_id = organisation_id
                # Update user record
                utable = self.settings.table_user
                query = (utable.id == user.id)
                db(query).update(organisation_id=organisation_id)
if not organisation_id:
return None
# Create link (if it doesn't exist)
user_id = user.id
ltable = s3db.org_organisation_user
if ltable:
query = (ltable.user_id == user_id) & \
(ltable.organisation_id == organisation_id)
row = db(query).select(ltable.id, limitby=(0, 1)).first()
if not row:
ltable.insert(user_id=user_id,
organisation_id=organisation_id)
return organisation_id
# -------------------------------------------------------------------------
def s3_link_to_person(self,
user=None,
owned_by_organisation=None):
"""
Links user accounts to person registry entries
@param user: the user record
@param owned_by_organisation: the role of the owner organisation
Policy for linking to pre-existing person records:
If a person record with exactly the same first name and
last name exists, which has a contact information record
with exactly the same email address as used in the user
account, and is not linked to another user account, then
this person record will be linked to this user account.
Otherwise, a new person record is created, and a new email
contact record with the email address from the user record
is registered for that person.
"""
db = current.db
s3db = current.s3db
utable = self.settings.table_user
ptable = s3db.pr_person
ctable = s3db.pr_contact
atable = s3db.pr_address
etable = s3db.pr_pentity
ttable = s3db.sit_trackable
gtable = s3db.gis_config
ltable = s3db.pr_person_user
left = [ltable.on(ltable.user_id == utable.id),
ptable.on(ptable.pe_id == ltable.pe_id)]
if user is not None:
if not isinstance(user, (list, tuple)):
user = [user]
user_ids = [u.id for u in user]
query = (utable.id.belongs(user_ids))
else:
query = (utable.id != None)
users = db(query).select(utable.id,
utable.first_name,
utable.last_name,
utable.email,
ltable.pe_id,
ptable.id,
left=left, distinct=True)
utn = utable._tablename
person_ids = [] # Collect the person IDs
for u in users:
person = u.pr_person
if person.id is not None:
person_ids.append(person.id)
continue
user = u[utn]
owner = Storage(owned_by_user=user.id,
owned_by_organisation=owned_by_organisation)
if "email" in user:
# Try to find a matching person record
first_name = user.first_name
last_name = user.last_name
email = user.email.lower()
query = (ptable.first_name == first_name) & \
(ptable.last_name == last_name) & \
(ctable.pe_id == ptable.pe_id) & \
(ctable.contact_method == "EMAIL") & \
(ctable.value.lower() == email)
person = db(query).select(ptable.id,
ptable.pe_id,
limitby=(0, 1)).first()
if person and \
not db(ltable.pe_id == person.pe_id).count():
# Match found, and it isn't linked to another user account
# Insert a link
ltable.insert(user_id=user.id, pe_id=person.pe_id)
# Assign ownership of the Person record
person.update_record(**owner)
# Assign ownership of the Contact record(s)
query = (ctable.pe_id == person.pe_id)
db(query).update(**owner)
# Assign ownership of the Address record(s)
query = (atable.pe_id == person.pe_id)
db(query).update(**owner)
# Assign ownership of the Config record(s)
query = (gtable.pe_id == person.pe_id)
db(query).update(**owner)
# HR records
self.s3_register_staff(user.id, person.id)
# Set pe_id if this is the current user
if self.user and self.user.id == user.id:
self.user.pe_id = person.pe_id
person_ids.append(person.id)
continue
# Create a PE
pe_id = etable.insert(instance_type="pr_person",
deleted=False)
# Create a TE
track_id = ttable.insert(instance_type="pr_person",
deleted=False)
if pe_id:
# Create a new person record
if current.request.vars.get("opt_in", None):
opt_in = current.deployment_settings.get_auth_opt_in_team_list()
else:
opt_in = ""
new_id = ptable.insert(pe_id = pe_id,
track_id = track_id,
first_name = first_name,
last_name = last_name,
opt_in = opt_in,
modified_by = user.id,
**owner)
if new_id:
# Insert a link
ltable.insert(user_id=user.id, pe_id=pe_id)
# Register the new person UUID in the PE and TE
person_uuid = ptable[new_id].uuid
db(etable.id == pe_id).update(uuid=person_uuid)
db(ttable.id == track_id).update(uuid=person_uuid)
# Add the email to pr_contact
ctable.insert(pe_id = pe_id,
contact_method = "EMAIL",
priority = 1,
value = email,
**owner)
# Add the mobile to pr_contact
mobile = current.request.vars.get("mobile", None)
if mobile:
ctable.insert(
pe_id = pe_id,
contact_method = "SMS",
priority = 2,
value = mobile,
**owner)
person_ids.append(new_id)
# Add the user to each team if they have chosen to opt-in
g_table = s3db["pr_group"]
gm_table = s3db["pr_group_membership"]
for team in opt_in:
query = (g_table.name == team)
team_rec = db(query).select(g_table.id, limitby=(0, 1)).first()
# if the team doesn't exist then add it
if team_rec == None:
team_id = g_table.insert(name = team, group_type = 5)
else:
team_id = team_rec.id
gm_table.insert(group_id = team_id,
person_id = new_id)
# Set pe_id if this is the current user
if self.user and self.user.id == user.id:
self.user.pe_id = pe_id
if len(person_ids) == 1:
return person_ids[0]
else:
return person_ids
# -------------------------------------------------------------------------
def s3_approver(self, user):
"""
            Returns the Approver for a new Registration and, if the email
            domain or organisation is known, the matching organisation_id
            @param user: the user record (form.vars when called directly)
"""
db = current.db
s3db = current.s3db
deployment_settings = current.deployment_settings
# Default Approver
approver = deployment_settings.get_mail_approver()
organisation_id = None
# Check for Domain: Whitelist or specific Approver
table = s3db.auth_organisation
address, domain = user.email.split("@", 1)
query = (table.domain == domain)
record = db(query).select(table.organisation_id,
table.approver,
limitby=(0, 1)).first()
if record:
organisation_id = record.organisation_id
approver = record.approver
elif deployment_settings.get_auth_registration_requests_organisation():
# Check for an Organization-specific Approver
organisation_id = user.get("organisation_id",
None)
if organisation_id:
query = (table.organisation_id == organisation_id)
record = db(query).select(table.approver,
limitby=(0, 1)).first()
if record and record.approver:
approver = record.approver
return approver, organisation_id
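        # Illustrative result (editor's sketch; domain and IDs are hypothetical):
        #     approver, organisation_id = auth.s3_approver(form.vars)
        #     # e.g. ("admin@example.org", 4) when the email domain is registered
        #     # in auth_organisation, otherwise (deployment default approver, None)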
# -------------------------------------------------------------------------
def verify_email(self,
next=DEFAULT,
onaccept=DEFAULT,
log=DEFAULT):
"""
            Action for the user to verify their registration email address
            (the verification key is expected as the last URL argument)
            .. method:: Auth.verify_email([next=DEFAULT [, onaccept=DEFAULT
                [, log=DEFAULT]]])
"""
db = current.db
settings = self.settings
messages = self.messages
deployment_settings = current.deployment_settings
key = current.request.args[-1]
table_user = settings.table_user
user = db(table_user.registration_key == key).select().first()
if not user:
redirect(settings.verify_email_next)
# S3: Lookup the Approver
approver, organisation_id = self.s3_approver(user)
if settings.registration_requires_approval and approver:
user.update_record(registration_key = "pending")
current.session.flash = messages.registration_pending_approval
else:
user.update_record(registration_key = "")
current.session.flash = messages.email_verified
if log == DEFAULT:
log = messages.verify_email_log
if next == DEFAULT:
next = settings.verify_email_next
if onaccept == DEFAULT:
onaccept = settings.verify_email_onaccept
if log:
self.log_event(log % user)
if approver:
user.approver = approver
callback(onaccept, user)
redirect(next)
# -------------------------------------------------------------------------
def s3_verify_email_onaccept(self, form):
""""
Sends a message to the approver to notify them if a user needs approval
If deployment_settings.auth.always_notify_approver = True,
send them notification regardless
"""
if form.registration_key == "": # User Approved
if not current.deployment_settings.get_auth_always_notify_approver():
return
subject = current.T("%(system_name)s - New User Registered") % \
{"system_name": current.deployment_settings.get_system_name()}
message = self.messages.new_user % dict(first_name = form.first_name,
last_name = form.last_name,
email = form.email)
else:
subject = current.T("%(system_name)s - New User Registration Approval Pending") % \
{"system_name": current.deployment_settings.get_system_name()}
message = self.messages.approve_user % \
dict(first_name=form.first_name,
last_name=form.last_name,
email=form.email)
result = self.settings.mailer.send(to=form.approver,
subject=subject,
message=message)
return result
# -------------------------------------------------------------------------
def s3_register_staff(self, user_id, person_id):
"""
Take ownership of the HR records of the person record,
and add user to the Org Access role.
To be called by s3_link_to_person in case a newly registered
user record gets linked to a prior existing person record.
@param user_id: the user record ID
@param person_id: the person record ID
"""
db = current.db
s3db = current.s3db
manager = current.manager
htable = s3db.table("hrm_human_resource")
if htable is None:
# HR module disabled: skip
return
rtable = self.settings.table_group
mtable = self.settings.table_membership
utable = self.settings.table_user
# User owns their own HRM records
query = (htable.person_id == person_id)
db(query).update(owned_by_user=user_id)
query &= ((htable.status == 1) &
(htable.deleted != True))
rows = db(query).select(htable.owned_by_organisation)
org_roles = []
for row in rows:
org_role = row.owned_by_organisation
if org_role and org_role not in org_roles:
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id == org_role)
if not db(query).select(limitby=(0, 1)).first():
org_roles.append(dict(user_id=user_id,
group_id=org_role))
if org_roles:
mtable.bulk_insert(org_roles)
# -------------------------------------------------------------------------
def s3_logged_in(self):
"""
Check whether the user is currently logged-in
- tries Basic if not
"""
if self.override:
return True
session = current.session
if not self.is_logged_in():
basic = self.basic()
try:
return basic[2]
except TypeError:
# old web2py
return basic
except:
return False
return True
# -------------------------------------------------------------------------
# Role Management
# -------------------------------------------------------------------------
def get_system_roles(self):
"""
            Get the IDs of the system roles by their UIDs, and store them
into the current session. To be run once per session, as these
IDs should never change.
Caution: do NOT cache the result, otherwise a newly installed
system would be completely open during the caching period!
"""
session = current.session
try:
if session.s3.system_roles:
return session.s3.system_roles
except:
pass
db = current.db
rtable = self.settings.table_group
if rtable is not None:
system_roles = self.S3_SYSTEM_ROLES
query = (rtable.deleted != True) & \
rtable.uuid.belongs(system_roles.values())
rows = db(query).select(rtable.id, rtable.uuid)
sr = Storage([(role.uuid, role.id) for role in rows])
else:
sr = Storage([(uid, None) for uid in self.S3_SYSTEM_ROLES])
session.s3.system_roles = sr
return sr
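        # Illustrative usage (editor's sketch, not part of the original module):
        #     sr = auth.get_system_roles()
        #     if sr.ADMIN in current.session.s3.roles:
        #         pass  # current user is a site administrator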
# -------------------------------------------------------------------------
def s3_create_role(self, role, description=None, *acls, **args):
"""
Back-end method to create roles with ACLs
@param role: display name for the role
@param description: description of the role (optional)
@param acls: list of initial ACLs to assign to this role
@param args: keyword arguments (see below)
@keyword name: a unique name for the role
@keyword hidden: hide this role completely from the RoleManager
@keyword system: role can be assigned, but neither modified nor
deleted in the RoleManager
@keyword protected: role can be assigned and edited, but not
deleted in the RoleManager
"""
table = self.settings.table_group
hidden = args.get("hidden", False)
system = args.get("system", False)
protected = args.get("protected", False)
uid = args.get("uid", None)
if uid:
query = (table.uuid == uid)
record = current.db(query).select(limitby=(0, 1)).first()
else:
record = None
import uuid
uid = uuid.uuid4()
if record:
role_id = record.id
record.update_record(deleted=False,
role=role,
description=description,
hidden=hidden,
system=system,
protected=protected)
else:
role_id = table.insert(uuid=uid,
role=role,
description=description,
hidden=hidden,
system=system,
protected=protected)
if role_id:
for acl in acls:
self.s3_update_acl(role_id, **acl)
return role_id
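        # Illustrative usage (editor's sketch; the role name, module prefix and
        # ACL value shown are hypothetical examples):
        #     role_id = auth.s3_create_role("Map Editor", "Can edit map layers",
        #                                   dict(c="gis", uacl=auth.permission.ALL),
        #                                   uid="MAP_EDITOR", protected=True)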
# -------------------------------------------------------------------------
def s3_delete_role(self, role_id):
"""
Remove a role from the system.
@param role_id: the ID or UID of the role
@note: protected roles cannot be deleted with this function,
need to reset the protected-flag first to override
"""
db = current.db
table = self.settings.table_group
if isinstance(role_id, str) and not role_id.isdigit():
gquery = (table.uuid == role_id)
else:
role_id = int(role_id)
gquery = (table.id == role_id)
role = db(gquery).select(limitby=(0, 1)).first()
if role and not role.protected:
# Remove all memberships for this role
mtable = self.settings.table_membership
mquery = (mtable.group_id == role.id)
db(mquery).update(deleted=True)
# Remove all ACLs for this role
ptable = self.permission.table
pquery = (ptable.group_id == role.id)
db(pquery).update(deleted=True)
# Remove the role
db(gquery).update(role=None, deleted=True)
# -------------------------------------------------------------------------
def resolve_role_ids(self, roles):
"""
Resolve role UIDs
@param roles: list of role IDs or UIDs (or mixed)
"""
db = current.db
if not isinstance(roles, (list, tuple)):
roles = [roles]
role_ids = []
role_uids = []
for role_id in roles:
if isinstance(role_id, str) and not role_id.isdigit():
role_uids.append(role_id)
else:
_id = int(role_id)
if _id not in role_ids:
role_ids.append(_id)
if role_uids:
rtable = self.settings.table_group
query = (rtable.deleted != True) & \
(rtable.uuid.belongs(role_uids))
rows = db(query).select(rtable.id)
role_ids += [r.id for r in rows if r.id not in role_ids]
return role_ids
# -------------------------------------------------------------------------
def s3_assign_role(self, user_id, role_id):
"""
Assigns a role to a user
@param user_id: the record ID of the user account
@param role_id: the record ID(s)/UID(s) of the role
@note: strings or lists of strings are assumed to be
role UIDs
"""
db = current.db
rtable = self.settings.table_group
mtable = self.settings.table_membership
query = (rtable.deleted != True)
if isinstance(role_id, (list, tuple)):
if isinstance(role_id[0], str):
query &= (rtable.uuid.belongs(role_id))
else:
# Numeric role IDs can be used directly, no UID lookup needed
roles = role_id
query = None
elif isinstance(role_id, str):
query &= (rtable.uuid == role_id)
else:
roles = [role_id]
query = None
if query is not None:
roles = db(query).select(rtable.id)
roles = [r.id for r in roles]
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(roles))
assigned = db(query).select(mtable.group_id)
assigned_roles = [r.group_id for r in assigned]
for role in roles:
if role not in assigned_roles:
mtable.insert(user_id=user_id, group_id=role)
# -------------------------------------------------------------------------
def s3_retract_role(self, user_id, role_id):
"""
Removes a role assignment from a user account
@param user_id: the record ID of the user account
@param role_id: the record ID(s)/UID(s) of the role
@note: strings or lists of strings are assumed to be
role UIDs
"""
if not role_id:
return
db = current.db
rtable = self.settings.table_group
mtable = self.settings.table_membership
query = (rtable.deleted != True)
if isinstance(role_id, (list, tuple)):
if isinstance(role_id[0], str):
query &= (rtable.uuid.belongs(role_id))
else:
roles = role_id
query = None
elif isinstance(role_id, str):
query &= (rtable.uuid == role_id)
else:
roles = [role_id]
query = None
if query is not None:
roles = db(query).select(rtable.id)
roles = [r.id for r in roles]
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id.belongs(roles))
db(query).update(deleted=True)
# -------------------------------------------------------------------------
def s3_has_role(self, role):
"""
Check whether the currently logged-in user has a role
@param role: the record ID or UID of the role
"""
if self.override:
return True
db = current.db
session = current.session
if not session.s3:
return False
# Trigger HTTP basic auth
self.s3_logged_in()
roles = session.s3.roles
if not roles:
return False
system_roles = session.s3.system_roles
if system_roles and system_roles.ADMIN in roles:
# Administrators have all roles
return True
if isinstance(role, str):
if role.isdigit():
role = int(role)
else:
rtable = self.settings.table_group
query = (rtable.deleted != True) & \
(rtable.uuid == role)
row = db(query).select(rtable.id, limitby=(0, 1)).first()
if row:
role = row.id
else:
return False
return role in session.s3.roles
# -------------------------------------------------------------------------
# ACL management
# -------------------------------------------------------------------------
def s3_update_acls(self, role, *acls):
"""
Wrapper for s3_update_acl to allow batch updating
"""
for acl in acls:
self.s3_update_acl(role, **acl)
# -------------------------------------------------------------------------
def s3_update_acl(self, role,
c=None, f=None, t=None, oacl=None, uacl=None,
organisation=None):
"""
Back-end method to update an ACL
"""
ALL = "all"
all_organisations = organisation == ALL
if all_organisations:
organisation = None
table = self.permission.table
if not table:
# ACLs not relevant to this security policy
return None
if c is None and f is None and t is None:
return None
if t is not None:
c = f = None
if uacl is None:
uacl = self.permission.NONE
if oacl is None:
oacl = uacl
if role:
query = ((table.group_id == role) & \
(table.controller == c) & \
(table.function == f) & \
(table.tablename == t))
record = current.db(query).select(table.id, limitby=(0, 1)).first()
acl = dict(deleted=False,
group_id=role,
controller=c,
function=f,
tablename=t,
oacl=oacl,
uacl=uacl,
all_organisations=all_organisations,
organisation=organisation)
if record:
success = record.update_record(**acl)
else:
success = table.insert(**acl)
return success
# -------------------------------------------------------------------------
# Utilities
# -------------------------------------------------------------------------
def s3_group_members(self, group_id):
"""
Get a list of members of a group
@param group_id: the group record ID
@returns: a list of the user_ids for members of a group
"""
membership = self.settings.table_membership
query = (membership.deleted != True) & \
(membership.group_id == group_id)
members = current.db(query).select(membership.user_id)
return [member.user_id for member in members]
# -------------------------------------------------------------------------
def s3_user_pe_id(self, user_id):
"""
Get the person pe_id for a user ID
@param user_id: the user ID
"""
db = current.db
s3db = current.s3db
ltable = s3db.pr_person_user
query = (ltable.user_id == user_id)
row = db(query).select(ltable.pe_id, limitby=(0, 1)).first()
if row:
return row.pe_id
return None
# -------------------------------------------------------------------------
def s3_logged_in_person(self):
"""
Get the person record ID for the current logged-in user
"""
db = current.db
s3db = current.s3db
ptable = s3db.pr_person
if self.s3_logged_in():
try:
query = (ptable.pe_id == self.user.pe_id)
except AttributeError:
# Prepop
pass
else:
record = db(query).select(ptable.id,
limitby=(0, 1)).first()
if record:
return record.id
return None
# -------------------------------------------------------------------------
def s3_logged_in_human_resource(self):
"""
Get the human resource record ID for the current logged-in user
"""
db = current.db
s3db = current.s3db
ptable = s3db.pr_person
htable = s3db.hrm_human_resource
if self.s3_logged_in():
try:
query = (htable.person_id == ptable.id) & \
(ptable.pe_id == self.user.pe_id)
except AttributeError:
# Prepop
pass
else:
record = db(query).select(htable.id,
orderby=~htable.modified_on,
limitby=(0, 1)).first()
if record:
return record.id
return None
# -------------------------------------------------------------------------
def s3_has_permission(self, method, table, record_id = 0):
"""
S3 framework function to define whether a user can access a record
in manner "method". Designed to be called from the RESTlike
controller.
@param table: the table or tablename
"""
if self.override:
return True
db = current.db
session = current.session
if not hasattr(table, "_tablename"):
s3db = current.s3db
table = s3db[table]
if session.s3.security_policy == 1:
# Simple policy
# Anonymous users can Read.
if method == "read":
authorised = True
else:
# Authentication required for Create/Update/Delete.
authorised = self.s3_logged_in()
elif session.s3.security_policy == 2:
# Editor policy
# Anonymous users can Read.
if method == "read":
authorised = True
elif method == "create":
# Authentication required for Create.
authorised = self.s3_logged_in()
elif record_id == 0 and method == "update":
# Authenticated users can update at least some records
authorised = self.s3_logged_in()
else:
# Editor role required for Update/Delete.
authorised = self.s3_has_role("Editor")
if not authorised and self.user and "owned_by_user" in table:
# Creator of Record is allowed to Edit
query = (table.id == record_id)
record = db(query).select(table.owned_by_user,
limitby=(0, 1)).first()
if record and self.user.id == record.owned_by_user:
authorised = True
elif session.s3.security_policy == 3:
# Controller ACLs
self.permission.use_cacls = True
self.permission.use_facls = False
self.permission.use_tacls = False
authorised = self.permission.has_permission(table,
record=record_id,
method=method)
elif session.s3.security_policy == 4:
# Controller+Function ACLs
self.permission.use_cacls = True
self.permission.use_facls = True
self.permission.use_tacls = False
authorised = self.permission.has_permission(table,
record=record_id,
method=method)
elif session.s3.security_policy >= 5:
# Controller+Function+Table ACLs
self.permission.use_cacls = True
self.permission.use_facls = True
self.permission.use_tacls = True
authorised = self.permission.has_permission(table,
record=record_id,
method=method)
else:
# Full policy
if self.s3_logged_in():
# Administrators are always authorised
if self.s3_has_role(1):
authorised = True
else:
# Require records in auth_permission to specify access
# (default Web2Py-style)
authorised = self.has_permission(method, table, record_id)
else:
# No access for anonymous
authorised = False
return authorised
# -------------------------------------------------------------------------
def s3_accessible_query(self, method, table):
"""
Returns a query with all accessible records for the currently
logged-in user
@note: This method does not work on GAE because it uses JOIN and IN
"""
if self.override:
return table.id > 0
db = current.db
session = current.session
T = current.T
policy = session.s3.security_policy
if policy == 1:
# "simple" security policy: show all records
return table.id > 0
elif policy == 2:
# "editor" security policy: show all records
return table.id > 0
elif policy in (3, 4, 5, 6):
# ACLs: use S3Permission method
query = self.permission.accessible_query(table, method)
return query
# "Full" security policy
if self.s3_has_role(1):
# Administrators can see all data
return table.id > 0
# If there is access to the entire table then show all records
try:
user_id = self.user.id
except:
user_id = 0
if self.has_permission(method, table, 0, user_id):
return table.id > 0
# Filter Records to show only those to which the user has access
session.warning = T("Only showing accessible records!")
membership = self.settings.table_membership
permission = self.settings.table_permission
return table.id.belongs(db(membership.user_id == user_id)\
(membership.group_id == permission.group_id)\
(permission.name == method)\
(permission.table_name == table)\
._select(permission.record_id))
# -------------------------------------------------------------------------
def s3_has_membership(self, group_id=None, user_id=None, role=None):
"""
Checks if user is member of group_id or role
Extends Web2Py's requires_membership() to add new functionality:
- Custom Flash style
- Uses s3_has_role()
"""
if self.override:
return True
group_id = group_id or self.id_group(role)
try:
group_id = int(group_id)
except:
group_id = self.id_group(group_id) # interpret group_id as a role
if self.s3_has_role(group_id):
r = True
else:
r = False
log = self.messages.has_membership_log
if log:
if not user_id and self.user:
user_id = self.user.id
self.log_event(log % dict(user_id=user_id,
group_id=group_id, check=r))
return r
# Override original method
has_membership = s3_has_membership
# -------------------------------------------------------------------------
def s3_requires_membership(self, role):
"""
Decorator that prevents access to action if not logged in or
if user logged in is not a member of group_id. If role is
provided instead of group_id then the group_id is calculated.
Extends Web2Py's requires_membership() to add new functionality:
- Custom Flash style
- Uses s3_has_role()
- Administrators (id=1) are deemed to have all roles
"""
def decorator(action):
def f(*a, **b):
if self.override:
return action(*a, **b)
if not self.s3_logged_in():
request = current.request
next = URL(args=request.args, vars=request.get_vars)
import urllib
redirect("%s?_next=%s" % (self.settings.login_url,
urllib.quote(next)))
if not self.s3_has_role(role) and not self.s3_has_role(1):
current.session.error = self.messages.access_denied
next = self.settings.on_failed_authorization
redirect(next)
return action(*a, **b)
f.__doc__ = action.__doc__
return f
return decorator
# Override original method
requires_membership = s3_requires_membership
# -------------------------------------------------------------------------
def s3_make_session_owner(self, table, record_id):
"""
Makes the current session owner of a record
@param table: the table or table name
@param record_id: the record ID
"""
if hasattr(table, "_tablename"):
table = table._tablename
if not self.user:
session = current.session
if "owned_records" not in session:
session.owned_records = Storage()
records = session.owned_records.get(table, [])
record_id = str(record_id)
if record_id not in records:
records.append(record_id)
session.owned_records[table] = records
# -------------------------------------------------------------------------
def s3_session_owns(self, table, record_id):
"""
Checks whether the current session owns a record
@param table: the table or table name
@param record_id: the record ID
"""
if hasattr(table, "_tablename"):
table = table._tablename
if not self.user:
try:
records = current.session.owned_records.get(table, [])
except:
records = []
if str(record_id) in records:
return True
return False
# -------------------------------------------------------------------------
def s3_set_record_owner(self, table, record):
"""
Set the owner organisation for a record
@param table: the table or table name
@param record: the record (as row) or record ID
"""
db = current.db
s3db = current.s3db
manager = current.manager
site_types = self.org_site_types
OWNED_BY_ORG = "owned_by_organisation"
ORG_ID = "organisation_id"
ORG_PREFIX = "Org_%s"
ORG_TABLENAME = "org_organisation"
NAME = "name"
org_table = s3db[ORG_TABLENAME]
grp_table = self.settings.table_group
# Get the table
if isinstance(table, str):
table = s3db[table]
tablename = table._tablename
_id = table._id.name
# Which fields are available?
fields = [table._id.name,
NAME,
ORG_ID,
OWNED_BY_ORG]
fields = [table[f] for f in fields if f in table.fields]
# Get the record
if not isinstance(record, Row):
record_id = record
record = db(table._id == record_id).select(limitby=(0, 1),
*fields).first()
else:
if table._id.name in record:
record_id = record[table._id.name]
else:
record_id = None
missing = [f for f in fields if f not in record]
if missing:
if record_id:
query = table._id == record_id
record = db(query).select(limitby=(0, 1),
*fields).first()
else:
record = None
if not record:
# Raise an exception here?
return
# Get the organisation ID
org_role = None
if tablename == ORG_TABLENAME:
organisation_id = record[_id]
if OWNED_BY_ORG in record:
org_role = record[OWNED_BY_ORG]
if not org_role:
# Create a new org_role
uuid = ORG_PREFIX % organisation_id
if NAME in table:
name = record[NAME]
else:
name = uuid
role = Storage(uuid=uuid,
deleted=False,
hidden=False,
system=True,
protected=True,
role="%s (Organisation)" % name,
description="All Staff of Organization %s" % name)
query = (grp_table.uuid == role.uuid) | \
(grp_table.role == role.role)
record = db(query).select(grp_table.id,
limitby=(0, 1)).first()
if not record:
org_role = grp_table.insert(**role)
else:
record.update_record(**role)
org_role = record.id
elif ORG_ID in table:
organisation_id = record[ORG_ID]
# Get the org_role from the organisation
if organisation_id:
query = org_table.id == organisation_id
organisation = db(query).select(org_table[OWNED_BY_ORG],
limitby=(0, 1)).first()
if organisation:
org_role = organisation[OWNED_BY_ORG]
# Update the record as necessary
data = Storage()
if org_role and OWNED_BY_ORG in table:
data[OWNED_BY_ORG] = org_role
if data and hasattr(record, "update_record"):
record.update_record(**data)
elif data and record_id:
db(table._id == record_id).update(**data)
return
# -------------------------------------------------------------------------
def s3_send_welcome_email(self, user):
"""
Send a welcome mail to newly-registered users
- especially suitable for users from Facebook/Google who don't
verify their emails
"""
if "name" in user:
user["first_name"] = user["name"]
if "family_name" in user:
# Facebook
user["last_name"] = user["family_name"]
subject = self.messages.welcome_email_subject
message = self.messages.welcome_email
self.settings.mailer.send(user["email"], subject=subject, message=message)
# =============================================================================
class S3Permission(object):
"""
S3 Class to handle permissions
@author: Dominic König <[email protected]>
"""
TABLENAME = "s3_permission"
CREATE = 0x0001
READ = 0x0002
UPDATE = 0x0004
DELETE = 0x0008
ALL = CREATE | READ | UPDATE | DELETE
NONE = 0x0000 # must be 0!
PERMISSION_OPTS = OrderedDict([
#(NONE, "NONE"),
#(READ, "READ"),
#(CREATE|UPDATE|DELETE, "WRITE"),
[CREATE, "CREATE"],
[READ, "READ"],
[UPDATE, "UPDATE"],
[DELETE, "DELETE"]])
# Method string <-> required permission
METHODS = Storage({
"create": CREATE,
"import": CREATE,
"read": READ,
"report": READ,
"search": READ,
"update": UPDATE,
"delete": DELETE})
# Policy helpers
most_permissive = lambda self, acl: \
reduce(lambda x, y: (x[0]|y[0], x[1]|y[1]),
acl, (self.NONE, self.NONE))
most_restrictive = lambda self, acl: \
reduce(lambda x, y: (x[0]&y[0], x[1]&y[1]),
acl, (self.ALL, self.ALL))
# -------------------------------------------------------------------------
def __init__(self, auth, tablename=None):
"""
Constructor, invoked by AuthS3.__init__
@param tablename: the name for the permissions table
"""
# Instantiated once per request, but before Auth tables
# are defined and authentication is checked, thus no use
# to check permissions in the constructor
# Auth
self.auth = auth
# Deployment settings
settings = current.deployment_settings
self.policy = settings.get_security_policy()
# Which level of granularity do we want?
self.use_cacls = self.policy in (3, 4, 5, 6) # Controller ACLs
self.use_facls = self.policy in (4, 5, 6) # Function ACLs
self.use_tacls = self.policy in (5, 6) # Table ACLs
self.org_roles = self.policy == 6 # OrgAuth
self.modules = settings.modules
# If a large number of roles in the system turns into a bottleneck
# in policy 6, then we could reduce the number of roles in
# subsequent queries; however, this would add another query (or even
# two more queries) to the request, so the hypothetical performance
# gain should first be confirmed by tests:
#if self.policy == 6:
#gtable = auth.settings.table_group
#org_roles = current.db(gtable.uid.like("Org_%")).select(gtable.id)
#self.org_roles = [r.id for r in org_roles]
#else:
#self.org_roles = []
# Permissions table
self.tablename = tablename or self.TABLENAME
self.table = current.db.get(self.tablename, None)
# Error messages
T = current.T
self.INSUFFICIENT_PRIVILEGES = T("Insufficient Privileges")
self.AUTHENTICATION_REQUIRED = T("Authentication Required")
# Request information
request = current.request
self.controller = request.controller
self.function = request.function
# Request format
self.format = request.extension
if "format" in request.get_vars:
ext = request.get_vars.format
if isinstance(ext, list):
ext = ext[-1]
self.format = ext.lower() or self.format
else:
ext = [a for a in request.args if "." in a]
if ext:
self.format = ext[-1].rsplit(".", 1)[1].lower()
if request.function == "ticket" and \
request.controller == "admin":
# Error tickets need an override
self.format = "html"
# Page permission cache
self.page_acls = Storage()
self.table_acls = Storage()
# Pages which never require permission:
# Make sure that any data access via these pages uses
# accessible_query explicitly!
self.unrestricted_pages = ("default/index",
"default/user",
"default/contact",
"default/about")
# Default landing pages
_next = URL(args=request.args, vars=request.vars)
self.homepage = URL(c="default", f="index")
self.loginpage = URL(c="default", f="user", args="login",
vars=dict(_next=_next))
# -------------------------------------------------------------------------
def define_table(self, migrate=True, fake_migrate=False):
"""
Define permissions table, invoked by AuthS3.define_tables()
"""
db = current.db
table_group = self.auth.settings.table_group
if table_group is None:
table_group = "integer" # fallback (doesn't work with requires)
if not self.table:
self.table = db.define_table(self.tablename,
Field("group_id", table_group),
Field("controller", length=64),
Field("function", length=512),
Field("tablename", length=512),
Field("oacl", "integer", default=self.ALL),
Field("uacl", "integer", default=self.READ),
# Only apply to records owned by this
# organisation role (policy 6 only):
Field("all_organisations", "boolean",
default=False),
Field("organisation",
table_group,
requires = IS_NULL_OR(IS_IN_DB(
db, table_group.id))),
migrate=migrate,
fake_migrate=fake_migrate,
*(s3_uid()+s3_timestamp()+s3_deletion_status()))
# -------------------------------------------------------------------------
def __call__(self,
c=None,
f=None,
table=None,
record=None):
"""
Get the ACL for the current user for a path
@param c: the controller name (falls back request.controller)
@param f: the function name (falls back to request.function)
@param table: the table
@param record: the record ID (or the Row if already loaded)
@note: if passing a Row, it must contain all available ownership
fields (id, owned_by_user, owned_by_group), otherwise the
record will be re-loaded by this function
"""
_debug("auth.permission(c=%s, f=%s, table=%s, record=%s)" %
(c, f, table, record))
t = self.table # Permissions table
auth = self.auth
sr = auth.get_system_roles()
if record == 0:
record = None
# Get user roles, check logged_in to trigger HTTPBasicAuth
if not auth.s3_logged_in():
roles = [sr.ANONYMOUS]
else:
roles = [sr.AUTHENTICATED]
if current.session.s3 is not None:
roles = current.session.s3.roles or roles
if not self.use_cacls:
# Fall back to simple authorization
_debug("Simple authorization")
if auth.s3_logged_in():
_debug("acl=%04x" % self.ALL)
return self.ALL
else:
_debug("acl=%04x" % self.READ)
return self.READ
if sr.ADMIN in roles:
_debug("Administrator, acl=%04x" % self.ALL)
return self.ALL
# Fall back to current request
c = c or self.controller
f = f or self.function
# Do we need to check the owner role (i.e. table+record given)?
is_owner = False
require_org = None
if table is not None and record is not None:
owner_role, owner_user, owner_org = \
self.get_owners(table, record)
is_owner = self.is_owner(table, None,
owner_role=owner_role,
owner_user=owner_user,
owner_org=owner_org)
if self.policy == 6:
require_org = owner_org
# Get the applicable ACLs
page_acl = self.page_acl(c=c, f=f,
require_org=require_org)
if table is None or not self.use_tacls:
acl = page_acl
else:
if sr.EDITOR in roles:
table_acl = (self.ALL, self.ALL)
else:
table_acl = self.table_acl(table=table,
c=c,
default=page_acl,
require_org=require_org)
acl = self.most_restrictive((page_acl, table_acl))
# Decide which ACL to use for this case
if acl[0] == self.NONE and acl[1] == self.NONE:
# No table access at all
acl = self.NONE
elif record is None:
# No record specified, return most permissive ACL
acl = (acl[0] & ~self.CREATE) | acl[1]
else:
# ACL based on ownership
acl = is_owner and (acl[0] | acl[1]) or acl[1]
_debug("acl=%04x" % acl)
return acl
# -------------------------------------------------------------------------
def page_acl(self, c=None, f=None, require_org=None):
"""
Get the ACL for a page
@param c: the controller (falls back to current request)
@param f: the function (falls back to current request)
@returns: tuple of (ACL for owned resources, ACL for all resources)
"""
session = current.session
policy = self.policy
t = self.table
sr = self.auth.get_system_roles()
most_permissive = self.most_permissive
roles = []
if session.s3 is not None:
roles = session.s3.roles or []
if sr.ADMIN in roles:
# Admin always has rights
return (self.ALL, self.ALL)
c = c or self.controller
f = f or self.function
page = "%s/%s" % (c, f)
if page in self.unrestricted_pages:
page_acl = (self.ALL, self.ALL)
elif c not in self.modules or \
c in self.modules and not self.modules[c].restricted or \
not self.use_cacls:
# Controller is not restricted => simple authorization
if self.auth.s3_logged_in():
page_acl = (self.ALL, self.ALL)
else:
page_acl = (self.READ, self.READ)
else:
# Lookup cached result
page_acl = self.page_acls.get((page, require_org), None)
if page_acl is None:
page_acl = (self.NONE, self.NONE) # default
q = ((t.deleted != True) & \
(t.controller == c) & \
((t.function == f) | (t.function == None)))
if roles:
query = (t.group_id.belongs(roles)) & q
else:
query = (t.group_id == None) & q
# Additional restrictions in OrgAuth
if policy == 6 and require_org:
field = t.organisation
query &= ((t.all_organisations == True) | \
(field == require_org) | (field == None))
rows = current.db(query).select()
if rows:
# ACLs found, check for function-specific
controller_acl = []
function_acl = []
for row in rows:
if not row.function:
controller_acl += [(row.oacl, row.uacl)]
else:
function_acl += [(row.oacl, row.uacl)]
# Function-specific ACL overrides Controller ACL
if function_acl and self.use_facls:
page_acl = most_permissive(function_acl)
elif controller_acl:
page_acl = most_permissive(controller_acl)
# Remember this result
self.page_acls.update({(page, require_org): page_acl})
return page_acl
# -------------------------------------------------------------------------
def table_acl(self, table=None, c=None, default=None,
require_org=None):
"""
Get the ACL for a table
@param table: the table
@param c: the controller (falls back to current request)
@param default: ACL to apply if no specific table ACL is found
@returns: tuple of (ACL for owned resources, ACL for all resources)
"""
if table is None or not self.use_tacls:
return self.page_acl(c=c)
policy = self.policy
t = self.table
sr = self.auth.get_system_roles()
roles = []
if current.session.s3 is not None:
roles = current.session.s3.roles or []
if sr.ADMIN in roles:
# Admin always has rights
return (self.ALL, self.ALL)
c = c or self.controller
if default is None:
if self.auth.s3_logged_in():
default = (self.ALL, self.ALL)
else:
default = (self.READ, self.READ)
# Already loaded?
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
table_acl = self.table_acls.get((tablename, require_org), None)
if table_acl is None:
q = ((t.deleted != True) & \
(t.tablename == tablename) &
((t.controller == c) | (t.controller == None)))
if roles:
query = (t.group_id.belongs(roles)) & q
else:
query = (t.group_id == None) & q
# Additional restrictions in OrgAuth
if policy == 6 and require_org:
field = t.organisation
query &= ((t.all_organisations == True) | \
(field == require_org) | (field == None))
rows = current.db(query).select()
table_acl = [(r.oacl, r.uacl) for r in rows]
if table_acl:
# ACL found, apply most permissive role
table_acl = self.most_permissive(table_acl)
else:
# No ACL found for any of the roles, fall back to default
table_acl = default
# Remember this result
self.table_acls.update({(tablename, require_org): table_acl})
return table_acl
# -------------------------------------------------------------------------
def get_owners(self, table, record):
"""
Get the organisation/group/user owning a record
@param table: the table
@param record: the record ID (or the Row, if already loaded)
"""
owner_org = None
owner_role = None
owner_user = None
record_id = None
# Check which ownership fields the table defines
ownership_fields = ("owned_by_user",
"owned_by_group",
"owned_by_organisation")
fields = [f for f in ownership_fields if f in table.fields]
if not fields:
# Ownership is not defined for this table
return (None, None, None)
if isinstance(record, Row):
# Check if all necessary fields are present
missing = [f for f in fields if f not in record]
if missing:
# Have to reload the record :(
if table._id.name in record:
record_id = record[table._id.name]
record = None
else:
# Record ID given
record_id = record
record = None
if not record and record_id:
# Get the record
fs = [table[f] for f in fields] + [table.id]
query = (table._id == record_id)
record = current.db(query).select(limitby=(0, 1), *fs).first()
if not record:
# Record does not exist
return (None, None, None)
if "owned_by_group" in record:
owner_role = record["owned_by_group"]
if "owned_by_user" in record:
owner_user = record["owned_by_user"]
if "owned_by_organisation" in record:
owner_org = record["owned_by_organisation"]
return (owner_role, owner_user, owner_org)
# -------------------------------------------------------------------------
def is_owner(self, table, record,
owner_role=None,
owner_user=None,
owner_org=None):
"""
Establish the ownership of a record
@param table: the table
@param record: the record ID (or the Row if already loaded)
@param owner_role: owner_role of the record (if already known)
@param owner_user: owner_user of the record (if already known)
@param owner_org: owner_org of the record (if already known)
@note: if passing a Row, it must contain all available ownership
fields (id, owned_by_user, owned_by_group), otherwise the
record will be re-loaded by this function
"""
user_id = None
roles = []
sr = self.auth.get_system_roles()
if self.auth.user is not None:
user_id = self.auth.user.id
if current.session.s3 is not None:
roles = current.session.s3.roles or []
if not user_id and not roles:
return False
elif sr.ADMIN in roles:
# Admin owns all records
return True
elif record:
owner_role, owner_user, owner_org = \
self.get_owners(table, record)
try:
record_id = record.id
except:
record_id = record
# Session ownership?
if not user_id:
if not owner_user and record_id and \
self.auth.s3_session_owns(table, record_id):
# Session owns record
return True
else:
return False
# Individual record ownership
if owner_user and owner_user == user_id:
return True
# OrgAuth?
if self.policy == 6 and owner_org:
# Must have the organisation's staff role
if owner_org not in roles:
return False
# Owner?
if not owner_role and not owner_user:
# All authenticated users own this record
return True
elif owner_role and owner_role in roles:
# user has owner role
return True
else:
return False
# -------------------------------------------------------------------------
def hidden_modules(self):
"""
List of modules to hide from the main menu
"""
sr = self.auth.get_system_roles()
hidden_modules = []
if self.use_cacls:
restricted_modules = [m for m in self.modules
if self.modules[m].restricted]
roles = []
if current.session.s3 is not None:
roles = current.session.s3.roles or []
if sr.ADMIN in roles or sr.EDITOR in roles:
return []
if not roles:
hidden_modules = restricted_modules
else:
t = self.table
query = (t.deleted != True) & \
(t.controller.belongs(restricted_modules)) & \
(t.tablename == None)
if roles:
query = query & (t.group_id.belongs(roles))
else:
query = query & (t.group_id == None)
rows = current.db(query).select()
acls = dict()
for acl in rows:
if acl.controller not in acls:
acls[acl.controller] = self.NONE
acls[acl.controller] |= acl.oacl | acl.uacl
hidden_modules = [m for m in restricted_modules
if m not in acls or not acls[m]]
return hidden_modules
# -------------------------------------------------------------------------
def accessible_url(self,
c=None,
f=None,
p=None,
t=None,
a=None,
args=[],
vars={},
anchor="",
extension=None,
env=None):
"""
Return a URL only if accessible by the user, otherwise False
@param c: the controller
@param f: the function
@param p: the permission (defaults to READ)
@param t: the tablename (defaults to <c>_<f>)
@param a: the application name
@param args: the URL arguments
@param vars: the URL variables
@param anchor: the anchor (#) of the URL
@param extension: the request format extension
@param env: the environment
"""
required = self.METHODS
if p in required:
permission = required[p]
else:
permission = self.READ
if not c:
c = self.controller
if not f:
f = self.function
if t is None:
tablename = "%s_%s" % (c, f)
else:
tablename = t
# Hide disabled modules
if self.modules and c not in self.modules:
return False
permitted = True
if not self.auth.override:
if self.use_cacls:
acl = self(c=c, f=f, table=tablename)
if acl & permission != permission:
permitted = False
else:
if permission != self.READ:
permitted = self.auth.s3_logged_in()
if permitted:
return URL(a=a,
c=c,
f=f,
args=args,
vars=vars,
anchor=anchor,
extension=extension,
env=env)
else:
return False
# -------------------------------------------------------------------------
def page_restricted(self, c=None, f=None):
"""
Checks whether a page is restricted (=whether ACLs
are to be applied)
@param c: controller
@param f: function
"""
page = "%s/%s" % (c, f)
if page in self.unrestricted_pages:
return False
elif c not in self.modules or \
c in self.modules and not self.modules[c].restricted:
return False
return True
# -------------------------------------------------------------------------
def applicable_acls(self, roles, racl, c=None, f=None, t=None):
"""
Get the available ACLs for the particular situation
@param roles: the roles of the current user
@param racl: the required ACL
@param c: controller
@param f: function
@param t: tablename
@returns: None for no ACLs to apply (access granted), [] for
no ACLs matching the required permissions (access
denied), or a list of ACLs to apply.
"""
db = current.db
table = self.table
if not self.use_cacls:
# We do not use ACLs at all
return None
c = c or self.controller
f = f or self.function
if self.page_restricted(c=c, f=f):
page_restricted = True
else:
page_restricted = False
# Get page ACLs
page_acls = None
if page_restricted:
# Base query
query = (table.deleted != True) & \
(table.function == None)
if f and self.use_facls:
query = (query | (table.function == f))
query &= (table.controller == c)
# Do not use delegated ACLs except for policy 6
if self.policy != 6:
query &= (table.organisation == None)
# Restrict to available roles
if roles:
query &= (table.group_id.belongs(roles))
else:
query &= (table.group_id == None)
page_acls = db(query).select(table.ALL)
if page_acls:
if f and self.use_facls:
facl = [acl for acl in page_acls if acl.function != None]
if facl:
page_acls = facl
page_acls = [acl for acl in page_acls
if (acl.uacl & racl == racl or
acl.oacl & racl == racl)]
else:
# Page is restricted, but no permitting ACL
# available for this set of roles => no access
return []
# Get table ACLs
table_acls = []
if t and self.use_tacls:
# Base query
query = ((table.deleted != True) & \
(table.controller == None) & \
(table.function == None) &
(table.tablename == t))
# Is the table restricted at all?
restricted = db(query).select(limitby=(0, 1)).first() is not None
# Do not use delegated ACLs except for policy 6
if self.policy != 6:
query &= (table.organisation == None)
# Restrict to available roles
if roles:
query = (table.group_id.belongs(roles)) & query
else:
query = (table.group_id == None) & query
table_acls = db(query).select(table.ALL)
if restricted and table_acls:
# if the table is restricted and there are ACLs
# available for this set of roles, then deny access
# if none of the ACLs gives the required permissions
_debug("acls: %s" % table_acls)
default = []
else:
# otherwise, if the table is unrestricted or there are
# no restricting ACLs for this set of roles, then grant
# access as per page_acls
default = page_acls
# Find matches
table_acls = [acl for acl in table_acls
if (acl.uacl & racl == racl or
acl.oacl & racl == racl)]
if table_acls:
# Found matching table ACLs, grant access
return table_acls
else:
# No matching table ACLs found
return default
# default:
return page_acls
# -------------------------------------------------------------------------
def accessible_query(self, table, *methods):
"""
Query for records which the user is permitted to access
with methods
Example::
query = auth.permission.accessible_query(table,
"read", "update")
- requests a query for records that can be both read and
updated.
@param table: the DB table
@param methods: list of methods for which permission is
required (AND), any combination of "create",
"read", "update", "delete"
"""
_debug("accessible_query(%s, %s)" % (table, methods))
session = current.session
policy = self.policy
required = self.METHODS
sr = self.auth.get_system_roles()
OWNED_BY_ORG = "owned_by_organisation"
OWNED_BY_USER = "owned_by_user"
OWNED_BY_GROUP = "owned_by_group"
ALL_ORGS = "all_organisations"
# Default queries
query = (table._id != None)
no_access = (table._id == None)
# Required ACL
racl = reduce(lambda a, b: a | b,
[required[m]
for m in methods if m in required],
self.NONE)
if not racl:
_debug("No permission specified, query=%s" % query)
return query
# User & Roles
user_id = None
if self.auth.user is not None:
user_id = self.auth.user.id
roles = []
if session.s3 is not None:
roles = session.s3.roles or []
if sr.ADMIN in roles or sr.EDITOR in roles:
_debug("Admin/Editor in Roles, query=%s" % query)
return query
# Org roles the user has
org_roles = []
all_orgs = False
if policy == 6:
org_roles = list(roles)
# Applicable ACLs
acls = self.applicable_acls(roles, racl, t=table)
permitted = False
ownership_required = True
if acls is None:
permitted = True
ownership_required = False
elif acls:
permitted = True
for acl in acls:
_debug("ACL: oacl=%04x uacl=%04x" % (acl.oacl, acl.uacl))
if acl.uacl & racl == racl:
ownership_required = False
_debug("uACL found - no ownership required")
if policy == 6:
org_role = acl.organisation
if acl[ALL_ORGS]:
all_orgs = True
elif org_role and org_role not in org_roles:
org_roles.append(org_role)
if not permitted:
_debug("No access")
return no_access
_debug("ownership_required=%s" % ownership_required)
# Query fragments
if OWNED_BY_ORG in table:
has_org_role = ((table[OWNED_BY_ORG] == None) | \
(table[OWNED_BY_ORG].belongs(org_roles)))
if OWNED_BY_USER in table:
user_owns_record = (table[OWNED_BY_USER] == user_id)
# OrgAuth
q = None
if policy == 6 and OWNED_BY_ORG in table and not all_orgs:
q = has_org_role
if user_id and OWNED_BY_USER in table:
q |= user_owns_record
if q is not None:
query = q
if ownership_required:
if not user_id:
query = (table._id == None)
if OWNED_BY_USER in table:
try:
records = session.owned_records.get(table._tablename,
None)
except:
pass
else:
if records:
query = (table._id.belongs(records))
else:
qowner = qrole = quser = None
if OWNED_BY_GROUP in table:
qrole = (table.owned_by_group.belongs(roles))
if OWNED_BY_USER in table and user_id:
quser = (table.owned_by_user == user_id)
if qrole is not None:
qowner = qrole
if quser is not None:
if qowner is not None:
qowner = (qowner | quser)
else:
qowner = quser
if qowner is not None:
if query is not None:
query = query & qowner
else:
query = qowner
# Fallback
if query is None:
query = (table._id > 0)
_debug("Access granted, query=%s" % query)
return query
# -------------------------------------------------------------------------
def ownership_required(self, table, *methods):
"""
Check if record ownership is required for a method
@param table: the table
@param methods: methods to check (OR)
@status: deprecated, using applicable_acls instead
"""
sr = self.auth.get_system_roles()
roles = []
if current.session.s3 is not None:
# No ownership required in policies without ACLs
if not self.use_cacls:
return False
roles = current.session.s3.roles or []
if sr.ADMIN in roles or sr.EDITOR in roles:
return False # Admins and Editors do not need to own a record
required = self.METHODS
racl = reduce(lambda a, b: a | b,
[required[m] for m in methods if m in required],
self.NONE)
if not racl:
return False
# Available ACLs
pacl = self.page_acl()
if not self.use_tacls:
acl = pacl
else:
tacl = self.table_acl(table)
acl = (tacl[0] & pacl[0], tacl[1] & pacl[1])
# Ownership required?
permitted = (acl[0] | acl[1]) & racl == racl
ownership_required = False
if not permitted:
pkey = table.fields[0]
query = (table[pkey] == None)
elif "owned_by_group" in table or "owned_by_user" in table:
ownership_required = permitted and acl[1] & racl != racl
return ownership_required
# -------------------------------------------------------------------------
def has_permission(self, table, record=None, method=None):
"""
Check permission to access a record
@param table: the table
@param record: the record or record ID (None for any record)
@param method: the method (or tuple/list of methods),
any of "create", "read", "update", "delete"
@note: when submitting a record, the record ID and the ownership
fields (="owned_by_user", "owned_by_group") must be contained
if available, otherwise the record will be re-loaded
"""
_debug("has_permission(%s, %s, method=%s)" %
(table, record, method))
required = self.METHODS
if not isinstance(method, (list, tuple)):
method = [method]
# Required ACL
racl = reduce(lambda a, b: a | b,
[required[m] for m in method if m in required], self.NONE)
# Available ACL
aacl = self(table=table, record=record)
permitted = racl & aacl == racl
_debug("permitted=%s" % permitted)
return permitted
# -------------------------------------------------------------------------
def permitted_facilities(self,
table=None,
error_msg=None,
redirect_on_error=True,
facility_type=None):
"""
If there are no facilities that the user has permission for,
prevents create & update of records in table & gives a
warning if the user tries to.
@param table: the table or table name
@param error_msg: error message
@param redirect_on_error: whether to redirect on error
@param facility_type: restrict to this particular type of
facilities (a tablename)
"""
db = current.db
s3db = current.s3db
T = current.T
ERROR = T("You do not have permission for any facility to perform this action.")
HINT = T("Create a new facility or ensure that you have permissions for an existing facility.")
if not error_msg:
error_msg = ERROR
site_ids = []
if facility_type is None:
site_types = self.auth.org_site_types
else:
if facility_type not in self.auth.org_site_types:
return
site_types = [s3db[facility_type]]
for site_type in site_types:
try:
ftable = s3db[site_type]
if "site_id" not in ftable.fields:
continue
query = self.auth.s3_accessible_query("update", ftable)
if "deleted" in ftable:
query &= (ftable.deleted != True)
rows = db(query).select(ftable.site_id)
site_ids += [row.site_id for row in rows]
except:
# Module disabled
pass
if site_ids:
return site_ids
args = current.request.args
if "update" in args or "create" in args:
if redirect_on_error:
# Trying to create or update
# If they do not have permission to any facilities
current.session.error = "%s %s" % (error_msg, HINT)
redirect(URL(c="default", f="index"))
elif table is not None:
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
current.manager.configure(tablename, insertable = False)
return []
# -------------------------------------------------------------------------
def permitted_organisations(self,
table=None,
error_msg=None,
redirect_on_error=True):
"""
If there are no organisations that the user has update
permission for, prevents create & update of a record in
table & gives a warning if the user tries to.
@param table: the table or table name
@param error_msg: error message
@param redirect_on_error: whether to redirect on error
"""
db = current.db
s3db = current.s3db
manager = current.manager
T = current.T
ERROR = T("You do not have permission for any organization to perform this action.")
HINT = T("Create a new organization or ensure that you have permissions for an existing organization.")
if not error_msg:
error_msg = ERROR
org_table = s3db.org_organisation
query = self.auth.s3_accessible_query("update", org_table)
query &= (org_table.deleted == False)
rows = db(query).select(org_table.id)
if rows:
return [org.id for org in rows]
request = current.request
if "update" in request.args or "create" in request.args:
if redirect_on_error:
manager.session.error = error_msg + " " + HINT
redirect(URL(c="default", f="index"))
elif table is not None:
if hasattr(table, "_tablename"):
tablename = table._tablename
else:
tablename = table
manager.configure(tablename, insertable = False)
return []
# -------------------------------------------------------------------------
def fail(self):
"""
Action upon insufficient permissions
"""
if self.format == "html":
# HTML interactive request => flash message + redirect
if self.auth.s3_logged_in():
current.session.error = self.INSUFFICIENT_PRIVILEGES
redirect(self.homepage)
else:
current.session.error = self.AUTHENTICATION_REQUIRED
redirect(self.loginpage)
else:
# non-HTML request => raise proper HTTP error
if self.auth.s3_logged_in():
raise HTTP(403, body=self.INSUFFICIENT_PRIVILEGES)
else:
raise HTTP(401, body=self.AUTHENTICATION_REQUIRED)
# =============================================================================
class S3Audit(object):
"""
S3 Audit Trail Writer Class
@author: Dominic König <[email protected]>
"""
def __init__(self,
tablename="s3_audit",
migrate=True,
fake_migrate=False):
"""
Constructor
@param tablename: the name of the audit table
@param migrate: migration setting
@note: this defines the audit table
"""
db = current.db
self.table = db.get(tablename, None)
if not self.table:
self.table = db.define_table(tablename,
Field("timestmp", "datetime"),
Field("person", "integer"),
Field("operation"),
Field("tablename"),
Field("record", "integer"),
Field("representation"),
Field("old_value", "text"),
Field("new_value", "text"),
migrate=migrate,
fake_migrate=fake_migrate)
session = current.session
self.auth = session.auth
if session.auth and session.auth.user:
self.user = session.auth.user.id
else:
self.user = None
self.diff = None
# -------------------------------------------------------------------------
def __call__(self, operation, prefix, name,
form=None,
record=None,
representation="unknown"):
"""
Audit
@param operation: Operation to log, one of
"create", "update", "read", "list" or "delete"
@param prefix: the module prefix of the resource
@param name: the name of the resource (without prefix)
@param form: the form
@param record: the record ID
@param representation: the representation format
"""
settings = current.session.s3
#print >>sys.stderr, "Audit %s: %s_%s record=%s representation=%s" % \
#(operation, prefix, name, record, representation)
now = datetime.datetime.utcnow()
db = current.db
table = self.table
tablename = "%s_%s" % (prefix, name)
if record:
if isinstance(record, Row):
record = record.get("id", None)
if not record:
return True
try:
record = int(record)
except ValueError:
record = None
elif form:
try:
record = form.vars["id"]
except:
try:
record = form["id"]
except:
record = None
if record:
try:
record = int(record)
except ValueError:
record = None
else:
record = None
if operation in ("list", "read"):
if settings.audit_read:
table.insert(timestmp = now,
person = self.user,
operation = operation,
tablename = tablename,
record = record,
representation = representation)
elif operation in ("create", "update"):
if settings.audit_write:
if form:
record = form.vars.id
new_value = ["%s:%s" % (var, str(form.vars[var]))
for var in form.vars]
else:
new_value = []
table.insert(timestmp = now,
person = self.user,
operation = operation,
tablename = tablename,
record = record,
representation = representation,
new_value = new_value)
self.diff = None
elif operation == "delete":
if settings.audit_write:
query = db[tablename].id == record
row = db(query).select(limitby=(0, 1)).first()
old_value = []
if row:
old_value = ["%s:%s" % (field, row[field])
for field in row]
table.insert(timestmp = now,
person = self.user,
operation = operation,
tablename = tablename,
record = record,
representation = representation,
old_value = old_value)
self.diff = None
return True
# =============================================================================
class S3RoleManager(S3Method):
"""
REST Method to manage ACLs (Role Manager UI for administrators)
@todo: does not handle org-wise role assignment or
delegation of permissions yet.
"""
# Controllers to hide from the permissions matrix
HIDE_CONTROLLER = ("admin", "default")
# Roles to hide from the permissions matrix
# @todo: deprecate
HIDE_ROLES = (1, 4)
# Undeletable roles
# @todo: deprecate
PROTECTED_ROLES = (1, 2, 3, 4, 5)
controllers = Storage()
# -------------------------------------------------------------------------
def apply_method(self, r, **attr):
"""
Apply role manager
"""
method = self.method
manager = current.manager
if method == "list":
output = self._list(r, **attr)
elif method in ("read", "create", "update"):
output = self._edit(r, **attr)
elif method == "delete":
output = self._delete(r, **attr)
elif method == "roles" and r.name == "user":
output = self._roles(r, **attr)
elif method == "users":
output = self._users(r, **attr)
else:
r.error(405, manager.ERROR.BAD_METHOD)
if r.http == "GET" and method not in ("create", "update", "delete"):
current.session.s3.cancel = r.url()
return output
# -------------------------------------------------------------------------
def _list(self, r, **attr):
"""
List roles/permissions
"""
output = dict()
request = self.request
response = current.response
resource = self.resource
manager = current.manager
auth = manager.auth
db = current.db
table = self.table
T = current.T
if r.id:
return self._edit(r, **attr)
# Show permission matrix?
show_matrix = request.get_vars.get("matrix", False) and True
if r.interactive:
# Title and subtitle
output.update(title = T("List of Roles"))
# System roles
query = ((table.deleted != True) & \
(table.system == True))
rows = db(query).select(table.id)
system_roles = [row.id for row in rows]
# Protected roles
query = ((table.deleted != True) & \
(table.protected == True))
rows = db(query).select(table.id)
protected_roles = [row.id for row in rows]
# Filter out hidden roles
resource.add_filter((~(table.id.belongs(self.HIDE_ROLES))) &
(table.hidden != True))
resource.load()
# Get active controllers
controllers = [c for c in self.controllers.keys()
if c not in self.HIDE_CONTROLLER]
# ACLs
acl_table = auth.permission.table
query = resource.get_query()
query = query & \
(acl_table.group_id == self.table.id) & \
(acl_table.deleted != True)
records = db(query).select(acl_table.ALL)
any = "ANY"
acls = Storage({any: Storage()})
for acl in records:
c = acl.controller
f = acl.function
if not f:
f = any
role_id = acl.group_id
if f not in acls:
acls[f] = Storage()
if c not in acls[f]:
acls[f][c] = Storage()
acls[f][c][str(role_id)] = Storage(oacl = acl.oacl,
uacl = acl.uacl)
for c in controllers:
if c not in acls[any]:
acls[any][c] = Storage()
if any not in acls[any][c]:
acls[any][c][any] = Storage(oacl = auth.permission.NONE,
uacl = auth.permission.NONE)
# Table header
columns = []
headers = [TH("ID"), TH(T("Role"))]
if show_matrix:
for c in controllers:
if c in acls[any]:
headers.append(TH(self.controllers[c].name_nice))
columns.append((c, any))
for f in acls:
if f != any and c in acls[f]:
headers.append(TH(self.controllers[c].name_nice,
BR(), f))
columns.append((c, f))
else:
headers += [TH(T("Description"))]
thead = THEAD(TR(headers))
# Table body
trows = []
i = 1
for role in resource:
role_id = role.id
role_name = role.role
role_desc = role.description
edit_btn = A(T("Edit"),
_href=URL(c="admin", f="role",
args=[role_id], vars=request.get_vars),
_class="action-btn")
users_btn = A(T("Users"),
_href=URL(c="admin", f="role",
args=[role_id, "users"]),
_class="action-btn")
if role.protected:
tdata = [TD(edit_btn,
XML(" "),
users_btn),
TD(role_name)]
else:
delete_btn = A(T("Delete"),
_href=URL(c="admin", f="role",
args=[role_id, "delete"],
vars=request.get_vars),
_class="delete-btn")
tdata = [TD(edit_btn,
XML(" "),
users_btn,
XML(" "),
delete_btn),
TD(role_name)]
if show_matrix:
# Display the permission matrix
for c, f in columns:
if f in acls and c in acls[f] and \
str(role_id) in acls[f][c]:
oacl = acls[f][c][str(role_id)].oacl
uacl = acls[f][c][str(role_id)].uacl
else:
oacl = acls[any][c][any].oacl
uacl = acls[any][c][any].uacl
oaclstr = ""
uaclstr = ""
options = auth.permission.PERMISSION_OPTS
NONE = auth.permission.NONE
for o in options:
if o == NONE and oacl == NONE:
oaclstr = "%s%s" % (oaclstr, options[o][0])
elif oacl and oacl & o:
oaclstr = "%s%s" % (oaclstr, options[o][0])
else:
oaclstr = "%s-" % oaclstr
if o == NONE and uacl == NONE:
uaclstr = "%s%s" % (uaclstr, options[o][0])
elif uacl and uacl & o:
uaclstr = "%s%s" % (uaclstr, options[o][0])
else:
uaclstr = "%s-" % uaclstr
values = "%s (%s)" % (uaclstr, oaclstr)
tdata += [TD(values, _nowrap="nowrap")]
else:
# Display role descriptions
tdata += [TD(role_desc)]
_class = i % 2 and "even" or "odd"
trows.append(TR(tdata, _class=_class))
tbody = TBODY(trows)
# Aggregate list
items = TABLE(thead, tbody, _id="list", _class="dataTable display")
output.update(items=items, sortby=[[1, "asc"]])
# Add-button
add_btn = A(T("Add Role"), _href=URL(c="admin", f="role",
args=["create"]),
_class="action-btn")
output.update(add_btn=add_btn)
response.view = "admin/role_list.html"
response.s3.actions = []
response.s3.no_sspag = True
elif r.representation == "xls":
# Not implemented yet
r.error(501, manager.ERROR.BAD_FORMAT)
else:
r.error(501, manager.ERROR.BAD_FORMAT)
return output
# -------------------------------------------------------------------------
def _edit(self, r, **attr):
"""
Create/update role
"""
output = dict()
request = self.request
session = current.session
manager = current.manager
db = current.db
T = current.T
crud_settings = manager.s3.crud
CACL = T("Application Permissions")
FACL = T("Function Permissions")
TACL = T("Table Permissions")
CANCEL = T("Cancel")
auth = manager.auth
model = manager.model
acl_table = auth.permission.table
if r.interactive:
# Get the current record (if any)
if r.record:
output.update(title=T("Edit Role"))
role_id = r.record.id
role_name = r.record.role
role_desc = r.record.description
else:
output.update(title=T("New Role"))
role_id = None
role_name = None
role_desc = None
# Form helpers ----------------------------------------------------
mandatory = lambda l: DIV(l, XML(" "),
SPAN("*", _class="req"))
acl_table.oacl.requires = IS_ACL(auth.permission.PERMISSION_OPTS)
acl_table.uacl.requires = IS_ACL(auth.permission.PERMISSION_OPTS)
acl_widget = lambda f, n, v: \
S3ACLWidget.widget(acl_table[f], v, _id=n, _name=n,
_class="acl-widget")
formstyle = crud_settings.formstyle
using_default = SPAN(T("using default"), _class="using-default")
delete_acl = lambda _id: _id is not None and \
A(T("Delete"),
_href = URL(c="admin", f="acl",
args=[_id, "delete"],
vars=dict(_next=r.url())),
_class = "delete-btn") or using_default
new_acl = SPAN(T("new ACL"), _class="new-acl")
# Role form -------------------------------------------------------
form_rows = formstyle("role_name",
mandatory("%s:" % T("Role Name")),
INPUT(value=role_name,
_name="role_name",
_type="text",
requires=IS_NOT_IN_DB(db,
"auth_group.role",
allowed_override=[role_name])),
"") + \
formstyle("role_desc",
"%s:" % T("Description"),
TEXTAREA(value=role_desc,
_name="role_desc",
_rows="4"),
"")
key_row = P(T("* Required Fields"), _class="red")
role_form = DIV(TABLE(form_rows), key_row, _id="role-form")
# Prepare ACL forms -----------------------------------------------
any = "ANY"
controllers = [c for c in self.controllers.keys()
if c not in self.HIDE_CONTROLLER]
ptables = []
query = (acl_table.deleted != True) & \
(acl_table.group_id == role_id)
records = db(query).select()
acl_forms = []
# Relevant ACLs
acls = Storage()
for acl in records:
if acl.controller in controllers:
if acl.controller not in acls:
acls[acl.controller] = Storage()
if not acl.function:
f = any
else:
if auth.permission.use_facls:
f = acl.function
else:
continue
acls[acl.controller][f] = acl
# Controller ACL table --------------------------------------------
# Table header
thead = THEAD(TR(TH(T("Application")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing ACLs
form_rows = []
i = 0
for c in controllers:
default = Storage(id = None,
controller = c,
function = any,
tablename = None,
uacl = auth.permission.NONE,
oacl = auth.permission.NONE)
if c in acls:
acl_list = acls[c]
if any not in acl_list:
acl_list[any] = default
else:
acl_list = Storage(ANY=default)
acl = acl_list[any]
_class = i % 2 and "even" or "odd"
i += 1
uacl = auth.permission.NONE
oacl = auth.permission.NONE
if acl.oacl is not None:
oacl = acl.oacl
if acl.uacl is not None:
uacl = acl.uacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s_%s_ANY_ANY" % (_id, c)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
cn = self.controllers[c].name_nice
form_rows.append(TR(TD(cn),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Tabs
tabs = [SPAN(A(CACL), _class="tab_here")]
if auth.permission.use_facls:
_class = auth.permission.use_tacls and \
"tab_other" or "tab_last"
tabs.append(SPAN(A(FACL, _class="facl-tab"), _class=_class))
if auth.permission.use_tacls:
tabs.append(SPAN(A(TACL, _class="tacl-tab"),
_class="tab_last"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="controller-acls"))
# Function ACL table ----------------------------------------------
if auth.permission.use_facls:
# Table header
thead = THEAD(TR(TH(T("Application")),
TH(T("Function")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing ACLs
form_rows = []
i = 0
for c in controllers:
if c in acls:
acl_list = acls[c]
else:
continue
keys = acl_list.keys()
keys.sort()
for f in keys:
if f == any:
continue
acl = acl_list[f]
_class = i % 2 and "even" or "odd"
i += 1
uacl = auth.permission.NONE
oacl = auth.permission.NONE
if acl.oacl is not None:
oacl = acl.oacl
if acl.uacl is not None:
uacl = acl.uacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s_%s_%s_ANY" % (_id, c, f)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
cn = self.controllers[c].name_nice
form_rows.append(TR(TD(cn),
TD(f),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Row to enter a new controller ACL
_class = i % 2 and "even" or "odd"
c_opts = [OPTION("", _value=None, _selected="selected")] + \
[OPTION(self.controllers[c].name_nice,
_value=c) for c in controllers]
c_select = SELECT(_name="new_controller", *c_opts)
form_rows.append(TR(
TD(c_select),
TD(INPUT(_type="text", _name="new_function")),
TD(acl_widget("uacl", "new_c_uacl", auth.permission.NONE)),
TD(acl_widget("oacl", "new_c_oacl", auth.permission.NONE)),
TD(new_acl), _class=_class))
# Tabs to change to the other view
tabs = [SPAN(A(CACL, _class="cacl-tab"),
_class="tab_other"),
SPAN(A(FACL), _class="tab_here")]
if auth.permission.use_tacls:
tabs.append(SPAN(A(TACL, _class="tacl-tab"),
_class="tab_last"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="function-acls"))
# Table ACL table -------------------------------------------------
if auth.permission.use_tacls:
query = (acl_table.deleted != True) & \
(acl_table.tablename != None)
tacls = db(query).select(acl_table.tablename, distinct=True)
if tacls:
ptables = [acl.tablename for acl in tacls]
# Relevant ACLs
acls = dict([(acl.tablename, acl) for acl in records
if acl.tablename in ptables])
# Table header
thead = THEAD(TR(TH(T("Tablename")),
TH(T("All Records")),
TH(T("Owned Records")),
TH()))
# Rows for existing table ACLs
form_rows = []
i = 0
for t in ptables:
_class = i % 2 and "even" or "odd"
i += 1
uacl = auth.permission.NONE
oacl = auth.permission.NONE
_id = None
if t in acls:
acl = acls[t]
if acl.uacl is not None:
uacl = acl.uacl
if acl.oacl is not None:
oacl = acl.oacl
_id = acl.id
delete_btn = delete_acl(_id)
n = "%s_ANY_ANY_%s" % (_id, t)
uacl = acl_widget("uacl", "acl_u_%s" % n, uacl)
oacl = acl_widget("oacl", "acl_o_%s" % n, oacl)
form_rows.append(TR(TD(t),
TD(uacl),
TD(oacl),
TD(delete_btn),
_class=_class))
# Row to enter a new table ACL
_class = i % 2 and "even" or "odd"
# @todo: find a better way to provide a selection of tables
#all_tables = [t._tablename for t in current.db]
form_rows.append(TR(
TD(INPUT(_type="text", _name="new_table")),
# @todo: doesn't work with conditional models
#requires=IS_EMPTY_OR(IS_IN_SET(all_tables,
#zero=None,
#error_message=T("Undefined Table"))))),
TD(acl_widget("uacl", "new_t_uacl", auth.permission.NONE)),
TD(acl_widget("oacl", "new_t_oacl", auth.permission.NONE)),
TD(new_acl), _class=_class))
# Tabs
tabs = [SPAN(A(CACL, _class="cacl-tab"),
_class="tab_other")]
if auth.permission.use_facls:
tabs.append(SPAN(A(FACL, _class="facl-tab"),
_class="tab_other"))
tabs.append(SPAN(A(TACL), _class="tab_here"))
acl_forms.append(DIV(DIV(tabs, _class="tabs"),
TABLE(thead, TBODY(form_rows)),
_id="table-acls"))
# Aggregate ACL Form ----------------------------------------------
acl_form = DIV(acl_forms, _id="table-container")
# Action row
if session.s3.cancel:
cancel = session.s3.cancel
else:
cancel = URL(c="admin", f="role",
vars=request.get_vars)
action_row = DIV(INPUT(_type="submit", _value=T("Save")),
A(CANCEL, _href=cancel, _class="action-lnk"),
_id="action-row")
# Complete form
form = FORM(role_form, acl_form, action_row)
# Append role_id
if role_id:
form.append(INPUT(_type="hidden",
_name="role_id",
value=role_id))
# Process the form ------------------------------------------------
if form.accepts(request.post_vars, session):
vars = form.vars
# Update the role
role = Storage(role=vars.role_name, description=vars.role_desc)
if r.record:
r.record.update_record(**role)
role_id = form.vars.role_id
session.confirmation = '%s "%s" %s' % (T("Role"),
role.role,
T("updated"))
else:
import uuid
role.uuid = uuid.uuid4()
role_id = self.table.insert(**role)
session.confirmation = '%s "%s" %s' % (T("Role"),
role.role,
T("created"))
if role_id:
# Collect the ACLs
acls = Storage()
for v in vars:
if v[:4] == "acl_":
acl_type, name = v[4:].split("_", 1)
n = name.split("_", 3)
i, c, f, t = map(lambda item: \
item != any and item or None, n)
if i.isdigit():
i = int(i)
else:
i = None
name = "%s_%s_%s" % (c, f, t)
if name not in acls:
acls[name] = Storage()
acls[name].update({"id": i,
"group_id": role_id,
"controller": c,
"function": f,
"tablename": t,
"%sacl" % acl_type: vars[v]})
for v in ("new_controller", "new_table"):
if v in vars and vars[v]:
c = v == "new_controller" and \
vars.new_controller or None
f = v == "new_controller" and \
vars.new_function or None
t = v == "new_table" and vars.new_table or None
name = "%s_%s_%s" % (c, f, t)
x = v == "new_table" and "t" or "c"
uacl = vars["new_%s_uacl" % x]
oacl = vars["new_%s_oacl" % x]
if name not in acls:
acls[name] = Storage()
acls[name].update(group_id=role_id,
controller=c,
function=f,
tablename=t,
oacl=oacl,
uacl=uacl)
# Save the ACLs
for acl in acls.values():
_id = acl.pop("id", None)
if _id:
query = (acl_table.deleted != True) & \
(acl_table.id == _id)
db(query).update(**acl)
elif acl.oacl or acl.uacl:
_id = acl_table.insert(**acl)
redirect(URL(f="role", vars=request.get_vars))
output.update(form=form)
if form.errors:
if "new_table" in form.errors:
output.update(acl="table")
elif "new_controller" in form.errors:
output.update(acl="function")
current.response.view = "admin/role_edit.html"
else:
r.error(501, manager.BAD_FORMAT)
return output
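    # Editor's note (illustrative, not part of the original module): the ACL
    # widgets rendered above use input names of the form
    #     acl_<u|o>_<aclid>_<controller>_<function>_<tablename>
    # with "ANY" as a placeholder, e.g. "acl_u_3_admin_ANY_ANY" is the
    # "All Records" widget for ACL #3 on the admin controller. The
    # form-processing loop above splits such a name back into
    # (id, controller, function, tablename), mapping "ANY" to None, before
    # the ACL records are updated or inserted.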
# -------------------------------------------------------------------------
def _delete(self, r, **attr):
"""
Delete role
"""
session = current.session
manager = current.manager
request = self.request
T = current.T
auth = manager.auth
if r.interactive:
if r.record:
role = r.record
role_id = role.id
role_name = role.role
if role_id in self.PROTECTED_ROLES or \
role.protected or role.system:
session.error = '%s "%s" %s' % (T("Role"),
role_name,
T("cannot be deleted."))
redirect(URL(c="admin", f="role",
vars=request.get_vars))
else:
db = current.db
# Delete all ACLs for this role:
acl_table = auth.permission.table
query = (acl_table.deleted != True) & \
(acl_table.group_id == role_id)
db(query).update(deleted=True)
# Remove all memberships:
membership_table = db.auth_membership
query = (membership_table.deleted != True) & \
(membership_table.group_id == role_id)
db(query).update(deleted=True)
# Update roles in session:
session.s3.roles = [role
for role in session.s3.roles
if role != role_id]
# Remove role:
query = (self.table.deleted != True) & \
(self.table.id == role_id)
db(query).update(role=None,
deleted=True)
# Confirmation:
session.confirmation = '%s "%s" %s' % (T("Role"),
role_name,
T("deleted"))
else:
session.error = T("No role to delete")
else:
r.error(501, manager.BAD_FORMAT)
redirect(URL(c="admin", f="role", vars=request.get_vars))
# -------------------------------------------------------------------------
def _roles(self, r, **attr):
"""
View/Update roles of a user
"""
output = dict()
db = current.db
T = current.T
CANCEL = T("Cancel")
session = current.session
manager = current.manager
sr = session.s3.system_roles
request = self.request
crud_settings = manager.s3.crud
formstyle = crud_settings.formstyle
auth = manager.auth
gtable = auth.settings.table_group
mtable = auth.settings.table_membership
if r.interactive:
if r.record:
user = r.record
user_id = user.id
username = user.email
query = (mtable.deleted != True) &\
(mtable.user_id == user_id)
memberships = db(query).select()
memberships = Storage([(str(m.group_id), m.id)
for m in memberships])
roles = db(gtable.deleted != True).select(gtable.ALL)
roles = Storage([(str(g.id), " %s" % g.role)
for g in roles
if g.hidden != True and \
g.id not in (sr.ANONYMOUS,
sr.AUTHENTICATED)])
field = Storage(name="roles",
requires = IS_IN_SET(roles, multiple=True))
widget = CheckboxesWidgetS3.widget(field, memberships.keys())
if session.s3.cancel:
cancel = session.s3.cancel
else:
cancel = r.url(method="")
form = FORM(TABLE(
TR(TD(widget)),
TR(TD(INPUT(_type="submit", _value=T("Save")),
A(CANCEL,
_href=cancel, _class="action-lnk")))))
if form.accepts(request.post_vars, session):
assign = form.vars.roles
for role in roles:
query = (mtable.deleted != True) & \
(mtable.user_id == user_id) & \
(mtable.group_id == role)
_set = db(query)
if str(role) not in assign:
_set.update(deleted=True)
else:
membership = _set.select(limitby=(0, 1)).first()
if not membership:
mtable.insert(user_id=user_id, group_id=role)
session.confirmation = T("User Updated")
redirect(r.url(method=""))
output.update(title="%s - %s" %
(T("Assigned Roles"), username),
form=form)
current.response.view = "admin/user_roles.html"
else:
session.error = T("No user to update")
redirect(r.url(method=""))
else:
r.error(501, manager.BAD_FORMAT)
return output
# -------------------------------------------------------------------------
def _users(self, r, **attr):
"""
View/Update users of a role
"""
output = dict()
session = current.session
manager = current.manager
request = self.request
db = current.db
T = current.T
auth = manager.auth
utable = auth.settings.table_user
gtable = auth.settings.table_group
mtable = auth.settings.table_membership
if r.interactive:
if r.record:
role_id = r.record.id
role_name = r.record.role
role_desc = r.record.description
title = "%s: %s" % (T("Role"), role_name)
output.update(title=title,
description=role_desc,
group=role_id)
if auth.settings.username:
username = "username"
else:
username = "email"
# @todo: Audit
users = db().select(utable.ALL)
query = (mtable.deleted != True) & \
(mtable.group_id == role_id)
assigned = db(query).select(mtable.ALL)
assigned_users = [row.user_id for row in assigned]
unassigned_users = [(row.id, row)
for row in users
if row.id not in assigned_users]
# Delete form
if assigned_users:
thead = THEAD(TR(TH(),
TH(T("Name")),
TH(T("Username")),
TH(T("Remove?"))))
trows = []
i = 0
for user in users:
if user.id not in assigned_users:
continue
_class = i % 2 and "even" or "odd"
i += 1
trow = TR(TD(A(), _name="Id"),
TD("%s %s" % (user.first_name,
user.last_name)),
TD(user[username]),
TD(INPUT(_type="checkbox",
_name="d_%s" % user.id,
_class="remove_item")),
_class=_class)
trows.append(trow)
trows.append(TR(TD(), TD(), TD(),
TD(INPUT(_id="submit_delete_button",
_type="submit",
_value=T("Remove")))))
tbody = TBODY(trows)
del_form = TABLE(thead, tbody, _id="list",
_class="dataTable display")
else:
del_form = T("No users with this role")
del_form = FORM(DIV(del_form, _id="table-container"),
_name="del_form")
# Add form
uname = lambda u: \
"%s: %s %s" % (u.id, u.first_name, u.last_name)
u_opts = [OPTION(uname(u[1]),
_value=u[0]) for u in unassigned_users]
if u_opts:
u_opts = [OPTION("",
_value=None, _selected="selected")] + u_opts
u_select = DIV(TABLE(TR(
TD(SELECT(_name="new_user", *u_opts)),
TD(INPUT(_type="submit",
_id="submit_add_button",
_value=T("Add"))))))
else:
u_select = T("No further users can be added")
add_form = FORM(DIV(u_select), _name="add_form")
# Process delete form
if del_form.accepts(request.post_vars,
session, formname="del_form"):
del_ids = [v[2:] for v in del_form.vars
if v[:2] == "d_" and
del_form.vars[v] == "on"]
query = (mtable.deleted != True) & \
(mtable.group_id == role_id) & \
(mtable.user_id.belongs(del_ids))
db(query).update(deleted=True)
redirect(r.url())
# Process add form
if add_form.accepts(request.post_vars,
session, formname="add_form"):
if add_form.vars.new_user:
mtable.insert(group_id=role_id,
user_id=add_form.vars.new_user)
redirect(r.url())
form = DIV(H4(T("Users with this role")), del_form,
H4(T("Add new users")), add_form)
list_btn = A(T("Back to Roles List"),
_href=URL(c="admin", f="role"),
_class="action-btn")
edit_btn = A(T("Edit Role"),
_href=URL(c="admin", f="role",
args=[role_id]),
_class="action-btn")
output.update(form=form, list_btn=list_btn, edit_btn=edit_btn)
current.response.view = "admin/role_users.html"
else:
session.error = T("No role to update")
redirect(r.there())
else:
r.error(501, manager.BAD_FORMAT)
return output
# =============================================================================
class FaceBookAccount(OAuthAccount):
""" OAuth implementation for FaceBook """
AUTH_URL = "https://graph.facebook.com/oauth/authorize"
TOKEN_URL = "https://graph.facebook.com/oauth/access_token"
# -------------------------------------------------------------------------
def __init__(self):
from facebook import GraphAPI, GraphAPIError
self.GraphAPI = GraphAPI
self.GraphAPIError = GraphAPIError
g = dict(GraphAPI=GraphAPI,
GraphAPIError=GraphAPIError,
request=current.request,
response=current.response,
session=current.session,
HTTP=HTTP)
client = current.auth.settings.facebook
OAuthAccount.__init__(self, g, client["id"], client["secret"],
self.AUTH_URL, self.TOKEN_URL,
scope="email,user_about_me,user_location,user_photos,user_relationships,user_birthday,user_website,create_event,user_events,publish_stream")
self.graph = None
# -------------------------------------------------------------------------
def login_url(self, next="/"):
""" Overriding to produce a different redirect_uri """
request = current.request
session = current.session
if not self.accessToken():
if not request.vars.code:
session.redirect_uri = "%s/%s/default/facebook/login" % \
(current.deployment_settings.get_base_public_url(),
request.application)
data = dict(redirect_uri=session.redirect_uri,
response_type="code",
client_id=self.client_id)
if self.args:
data.update(self.args)
auth_request_url = self.auth_url + "?" + urlencode(data)
raise HTTP(307,
"You are not authenticated: you are being redirected to the <a href='" + auth_request_url + "'> authentication server</a>",
Location=auth_request_url)
else:
session.code = request.vars.code
self.accessToken()
#return session.code
return next
# -------------------------------------------------------------------------
def get_user(self):
""" Returns the user using the Graph API. """
db = current.db
auth = current.auth
session = current.session
if not self.accessToken():
return None
if not self.graph:
self.graph = self.GraphAPI((self.accessToken()))
user = None
try:
user = self.graph.get_object_c("me")
except self.GraphAPIError:
self.session.token = None
self.graph = None
if user:
# Check if a user with this email has already registered
#session.facebooklogin = True
table = auth.settings.table_user
query = (table.email == user["email"])
existent = db(query).select(table.id,
table.password,
limitby=(0, 1)).first()
if existent:
#session["%s_setpassword" % existent.id] = existent.password
_user = dict(first_name = user.get("first_name", ""),
last_name = user.get("last_name", ""),
facebookid = user["id"],
facebook = user.get("username", user["id"]),
email = user["email"],
password = existent.password
)
return _user
else:
# b = user["birthday"]
# birthday = "%s-%s-%s" % (b[-4:], b[0:2], b[-7:-5])
# if 'location' in user:
# session.flocation = user['location']
#session["is_new_from"] = "facebook"
auth.s3_send_welcome_email(user)
# auth.initial_user_permission(user) # Called on profile page
_user = dict(first_name = user.get("first_name", ""),
last_name = user.get("last_name", ""),
facebookid = user["id"],
facebook = user.get("username", user["id"]),
nickname = IS_SLUG()(user.get("username", "%(first_name)s-%(last_name)s" % user) + "-" + user['id'][:5])[0],
email = user["email"],
# birthdate = birthday,
about = user.get("bio", ""),
website = user.get("website", ""),
# gender = user.get("gender", "Not specified").title(),
photo_source = 3,
tagline = user.get("link", ""),
registration_type = 2,
)
return _user
# =============================================================================
class GooglePlusAccount(OAuthAccount):
"""
OAuth implementation for Google
https://code.google.com/apis/console/
"""
AUTH_URL = "https://accounts.google.com/o/oauth2/auth"
TOKEN_URL = "https://accounts.google.com/o/oauth2/token"
API_URL = "https://www.googleapis.com/oauth2/v1/userinfo"
# -------------------------------------------------------------------------
def __init__(self):
request = current.request
settings = current.deployment_settings
g = dict(request=request,
response=current.response,
session=current.session,
HTTP=HTTP)
client = current.auth.settings.google
self.globals = g
self.client = client
self.client_id = client["id"]
self.client_secret = client["secret"]
self.auth_url = self.AUTH_URL
self.args = dict(
scope = "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile",
user_agent = "google-api-client-python-plus-cmdline/1.0",
xoauth_displayname = settings.get_system_name(),
response_type = "code",
redirect_uri = "%s/%s/default/google/login" % \
(settings.get_base_public_url(),
request.application),
approval_prompt = "force",
state = "google"
)
self.graph = None
# -------------------------------------------------------------------------
def __build_url_opener(self, uri):
"""
            Build the URL opener for managing HTTP Basic Authentication
"""
# Create an OpenerDirector with support
# for Basic HTTP Authentication...
auth_handler = urllib2.HTTPBasicAuthHandler()
auth_handler.add_password(None,
uri,
self.client_id,
self.client_secret)
opener = urllib2.build_opener(auth_handler)
return opener
# -------------------------------------------------------------------------
def accessToken(self):
"""
Return the access token generated by the authenticating server.
            If a token is already stored in the session, that one will be used.
            Otherwise a new token is fetched from the auth server.
"""
session = current.session
if session.token and session.token.has_key("expires"):
expires = session.token["expires"]
# reuse token until expiration
if expires == 0 or expires > time.time():
return session.token["access_token"]
if session.code:
data = dict(client_id = self.client_id,
client_secret = self.client_secret,
redirect_uri = self.args["redirect_uri"],
code = session.code,
grant_type = "authorization_code",
scope = "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile")
# if self.args:
# data.update(self.args)
open_url = None
opener = self.__build_url_opener(self.TOKEN_URL)
try:
open_url = opener.open(self.TOKEN_URL, urlencode(data))
except urllib2.HTTPError, e:
raise Exception(e.read())
finally:
del session.code # throw it away
if open_url:
try:
session.token = json.loads(open_url.read())
session.token["expires"] = int(session.token["expires_in"]) + \
time.time()
finally:
opener.close()
return session.token["access_token"]
session.token = None
return None
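    # Editor's note (illustrative; the values are made up): the token JSON that
    # accessToken() caches in session.token follows Google's OAuth2 token
    # response, e.g.
    #     {"access_token": "ya29....", "expires_in": 3599, "token_type": "Bearer"}
    # to which an absolute "expires" timestamp (time.time() + expires_in) is
    # added so later calls can reuse the token until it actually expires.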
# -------------------------------------------------------------------------
def login_url(self, next="/"):
""" Overriding to produce a different redirect_uri """
request = current.request
session = current.session
if not self.accessToken():
if not request.vars.code:
session.redirect_uri = self.args["redirect_uri"]
data = dict(redirect_uri=session.redirect_uri,
response_type="code",
client_id=self.client_id)
if self.args:
data.update(self.args)
auth_request_url = self.auth_url + "?" + urlencode(data)
raise HTTP(307,
"You are not authenticated: you are being redirected to the <a href='" + auth_request_url + "'> authentication server</a>",
Location=auth_request_url)
else:
session.code = request.vars.code
self.accessToken()
#return session.code
return next
# -------------------------------------------------------------------------
def get_user(self):
""" Returns the user using the Graph API. """
db = current.db
auth = current.auth
session = current.session
if not self.accessToken():
return None
user = None
try:
user = self.call_api()
except Exception, e:
print str(e)
session.token = None
if user:
# Check if a user with this email has already registered
#session.googlelogin = True
table = auth.settings.table_user
query = (table.email == user["email"])
existent = db(query).select(table.id,
table.password,
limitby=(0, 1)).first()
if existent:
#session["%s_setpassword" % existent.id] = existent.password
_user = dict(
#first_name = user.get("given_name", user["name"]),
#last_name = user.get("family_name", user["name"]),
googleid = user["id"],
email = user["email"],
password = existent.password
)
return _user
else:
# b = user["birthday"]
# birthday = "%s-%s-%s" % (b[-4:], b[0:2], b[-7:-5])
# if "location" in user:
# session.flocation = user["location"]
#session["is_new_from"] = "google"
auth.s3_send_welcome_email(user)
_user = dict(
first_name = user.get("given_name", user["name"].split()[0]),
last_name = user.get("family_name", user["name"].split()[-1]),
googleid = user["id"],
nickname = "%(first_name)s-%(last_name)s-%(id)s" % dict(first_name=user["name"].split()[0].lower(), last_name=user["name"].split()[-1].lower(), id=user['id'][:5]),
email = user["email"],
# birthdate = birthday,
website = user.get("link", ""),
# gender = user.get("gender", "Not specified").title(),
photo_source = 6 if user.get("picture", None) else 2,
googlepicture = user.get("picture", ""),
registration_type = 3,
)
return _user
# -------------------------------------------------------------------------
def call_api(self):
api_return = urllib.urlopen("https://www.googleapis.com/oauth2/v1/userinfo?access_token=%s" % self.accessToken())
user = json.loads(api_return.read())
if user:
return user
else:
            current.session.token = None  # invalidate the cached token
return None
# END =========================================================================<|fim▁end|> | |
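# Editor's sketch (assumption, not from the original module): wiring one of the
# OAuth classes above into web2py's Auth. The credential values are
# placeholders; auth.settings.facebook / auth.settings.google are the dicts
# read in the respective __init__ methods.
#
#   auth.settings.facebook = {"id": "<app-id>", "secret": "<app-secret>"}
#   auth.settings.login_form = FaceBookAccount()
#   # login_url()/get_user() are then driven by the OAuthAccount base class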
<|file_name|>processPrivatePage.ts<|end_file_name|><|fim▁begin|>import {contact} from './contact';
import {getGenderFromName} from './getGenderFromName';
import {normalizeFbLink as normalizeLink} from './normalizeLink';
import {eduwork} from './eduwork';
import {hometown} from './hometown';
import {bio} from './bio';
import {favorites} from './favorites';
import {PersonProfile} from './interfaces/PersonProfile';
export const processPrivatePage = ($: CheerioStatic): PersonProfile => {
const _contact = contact($('#pagelet_contact'));
const name = $('#fbProfileCover h1').text();
const cover: Cheerio = $('#fbProfileCover h1 a');
return {
name,
'name-based-gender': getGenderFromName(name),
link: normalizeLink(String(cover ? cover.attr('href') : '')),
avatar: $('#fbTimelineHeadline .profilePicThumb img').attr('src'),
eduwork: eduwork($('#pagelet_eduwork')),
hometown: hometown($('#pagelet_hometown')),
bio: bio($('#pagelet_bio')),
contact: _contact,
favorites: favorites($('#favorites'))<|fim▁hole|>};<|fim▁end|> | }; |
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>use api::rpc::Error;
pub fn unauthenticated() -> Error {
Error {
code: 1,
message: "authentication is required for this request".to_owned(),
data: None,
}
}
pub fn already_authenticated() -> Error {
Error {
code: 2,<|fim▁hole|> data: None,
}
}
pub fn unauthorized_scope() -> Error {
Error {
code: 2,
message: "client not authorized to access scope".to_owned(),
data: None,
}
}<|fim▁end|> | message: "client is already authenticated".to_owned(), |
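// Editor's sketch (not part of the original crate): a minimal test of the
// constructors above, assuming the Error { code, message, data } fields are
// visible here as the struct literals imply. Note that already_authenticated()
// and unauthorized_scope() both return code 2, which looks like a copy-paste
// slip worth double-checking.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn error_codes() {
        assert_eq!(unauthenticated().code, 1);
        assert_eq!(already_authenticated().code, 2);
        assert_eq!(unauthorized_scope().code, 2);
    }
}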
<|file_name|>thermal_1_0_0_temperature.py<|end_file_name|><|fim▁begin|># coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from pprint import pformat
from six import iteritems
class Thermal100Temperature(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
Thermal100Temperature - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'member_id': 'str',
'oem': 'ResourceOem',
'physical_context': 'PhysicalContext100PhysicalContext',
'related_item': 'list[Odata400IdRef]',
'related_itemodata_count': 'Odata400Count',
'related_itemodata_navigation_link': 'Odata400IdRef',
'status': 'ResourceStatus'
}
self.attribute_map = {
'member_id': 'MemberId',
'oem': 'Oem',
'physical_context': 'PhysicalContext',
'related_item': 'RelatedItem',
'related_itemodata_count': '[email protected]',
'related_itemodata_navigation_link': '[email protected]',
'status': 'Status'
}
self._member_id = None
self._oem = None
self._physical_context = None
self._related_item = None
self._related_itemodata_count = None
self._related_itemodata_navigation_link = None
self._status = None
@property
def member_id(self):
"""
Gets the member_id of this Thermal100Temperature.
This is the identifier for the member within the collection.
:return: The member_id of this Thermal100Temperature.
:rtype: str
"""
return self._member_id
@member_id.setter
def member_id(self, member_id):
"""
Sets the member_id of this Thermal100Temperature.
This is the identifier for the member within the collection.
:param member_id: The member_id of this Thermal100Temperature.
:type: str
"""
self._member_id = member_id
@property
def oem(self):
"""
Gets the oem of this Thermal100Temperature.
This is the manufacturer/provider specific extension moniker used to divide the Oem object into sections.
:return: The oem of this Thermal100Temperature.
:rtype: ResourceOem
"""
return self._oem
@oem.setter
def oem(self, oem):
"""
Sets the oem of this Thermal100Temperature.
This is the manufacturer/provider specific extension moniker used to divide the Oem object into sections.
:param oem: The oem of this Thermal100Temperature.
:type: ResourceOem
"""
self._oem = oem
@property
def physical_context(self):
"""
Gets the physical_context of this Thermal100Temperature.
Describes the area or device to which this temperature measurement applies.
:return: The physical_context of this Thermal100Temperature.
:rtype: PhysicalContext100PhysicalContext
"""
return self._physical_context
@physical_context.setter
def physical_context(self, physical_context):
"""
Sets the physical_context of this Thermal100Temperature.
Describes the area or device to which this temperature measurement applies.
:param physical_context: The physical_context of this Thermal100Temperature.
:type: PhysicalContext100PhysicalContext
"""
self._physical_context = physical_context
@property
def related_item(self):
"""
Gets the related_item of this Thermal100Temperature.
Describes the areas or devices to which this temperature measurement applies.
:return: The related_item of this Thermal100Temperature.
:rtype: list[Odata400IdRef]
"""
return self._related_item
@related_item.setter
def related_item(self, related_item):
"""
Sets the related_item of this Thermal100Temperature.
Describes the areas or devices to which this temperature measurement applies.
:param related_item: The related_item of this Thermal100Temperature.
:type: list[Odata400IdRef]
"""
self._related_item = related_item
@property
def related_itemodata_count(self):
"""
Gets the related_itemodata_count of this Thermal100Temperature.
:return: The related_itemodata_count of this Thermal100Temperature.
:rtype: Odata400Count
"""
return self._related_itemodata_count
@related_itemodata_count.setter
def related_itemodata_count(self, related_itemodata_count):
"""
Sets the related_itemodata_count of this Thermal100Temperature.
:param related_itemodata_count: The related_itemodata_count of this Thermal100Temperature.
:type: Odata400Count
"""
self._related_itemodata_count = related_itemodata_count
@property
def related_itemodata_navigation_link(self):
"""
Gets the related_itemodata_navigation_link of this Thermal100Temperature.
:return: The related_itemodata_navigation_link of this Thermal100Temperature.
:rtype: Odata400IdRef
"""
return self._related_itemodata_navigation_link
@related_itemodata_navigation_link.setter
def related_itemodata_navigation_link(self, related_itemodata_navigation_link):
"""
Sets the related_itemodata_navigation_link of this Thermal100Temperature.
:param related_itemodata_navigation_link: The related_itemodata_navigation_link of this Thermal100Temperature.
:type: Odata400IdRef
"""
self._related_itemodata_navigation_link = related_itemodata_navigation_link
@property
def status(self):
"""
Gets the status of this Thermal100Temperature.
:return: The status of this Thermal100Temperature.
:rtype: ResourceStatus<|fim▁hole|> return self._status
@status.setter
def status(self, status):
"""
Sets the status of this Thermal100Temperature.
:param status: The status of this Thermal100Temperature.
:type: ResourceStatus
"""
self._status = status
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other<|fim▁end|> | """ |
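# Editor's sketch (illustrative; not part of the generated client):
#
#   t = Thermal100Temperature()
#   t.member_id = "0"
#   t.to_dict()   # -> {'member_id': '0', 'oem': None, 'physical_context': None, ...}
#
# to_dict()/to_str() walk swagger_types, so only the attributes declared there
# are serialized, and nested models are expanded through their own to_dict().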
<|file_name|>session.py<|end_file_name|><|fim▁begin|>''' Wrapper around session access. Keep track of session madness as the app grows. '''
import logging
logger = logging.getLogger("pyfb")
FB_ACCESS_TOKEN = "fb_access_token"
FB_EXPIRES = "fb_expires"
FB_USER_ID = "fb_user_id"
def get_fb_access_token(request, warn=True):
token = request.session.get(FB_ACCESS_TOKEN)
if not token and warn:
logger.warn("pyfb: No access token found in session")
return token
def get_fb_expires(request, warn=True):
expires = request.session.get(FB_EXPIRES)
if not expires and warn:
logger.warn("pyfb: No 'expires' found in session")
return expires
def get_fb_user_id(request, warn=True):
user_id = request.session.get(FB_USER_ID)
if not user_id and warn:
logger.warn("pyfb: No user_id found in session")
return user_id
def set_fb_access_token(request, access_token):<|fim▁hole|> request.session[FB_ACCESS_TOKEN] = access_token
def set_fb_expires(request, expires):
request.session[FB_EXPIRES] = expires
def set_fb_user_id(request, user_id):
request.session[FB_USER_ID] = user_id<|fim▁end|> | |
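# Editor's sketch (illustrative; the callback below is hypothetical): typical
# use of the helpers above from an OAuth callback view.
#
#   def facebook_callback(request, token, expires, user_id):
#       set_fb_access_token(request, token)
#       set_fb_expires(request, expires)
#       set_fb_user_id(request, user_id)
#
# Later, get_fb_access_token(request) returns the stored token, or logs
# "pyfb: No access token found in session" and returns None.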
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![feature(libc)]
#[macro_use]
extern crate bitflags;
extern crate time;
extern crate libc;
use libc::c_int;
use std::io;
pub mod epoll;
pub mod kqueue;
pub mod event;
pub mod select;
pub fn errno(rv: c_int) -> io::Result<()> {
if rv < 0 {
return Err(io::Error::last_os_error());<|fim▁hole|>}
pub fn errno_rv(rv: c_int) -> io::Result<c_int> {
if rv < 0 {
return Err(io::Error::last_os_error());
}
Ok(rv)
}<|fim▁end|> | }
Ok(()) |
<|file_name|>LeaderServiceTask.java<|end_file_name|><|fim▁begin|>package com.bazaarvoice.emodb.common.dropwizard.leader;
import com.bazaarvoice.curator.recipes.leader.LeaderService;
import com.bazaarvoice.emodb.common.dropwizard.task.TaskRegistry;
import com.bazaarvoice.emodb.common.zookeeper.leader.PartitionedLeaderService;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.Maps;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.Service;
import com.google.inject.Inject;
import io.dropwizard.servlets.tasks.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.PrintWriter;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentMap;
/**
 * Shows the current status of leadership processes managed by {@link LeaderService}. Allows terminating
 * individual leadership processes; once terminated, they can only be restarted by restarting the entire server.
*/
public class LeaderServiceTask extends Task {
private static final Logger _log = LoggerFactory.getLogger(LeaderServiceTask.class);
private final ConcurrentMap<String, LeaderService> _selectorMap = Maps.newConcurrentMap();
@Inject
public LeaderServiceTask(TaskRegistry tasks) {
super("leader");
tasks.addTask(this);
}
public void register(final String name, final LeaderService leaderService) {
_selectorMap.put(name, leaderService);
// Unregister automatically to avoid memory leaks.
leaderService.addListener(new AbstractServiceListener() {
@Override
public void terminated(Service.State from) {
unregister(name, leaderService);
}
@Override
public void failed(Service.State from, Throwable failure) {
unregister(name, leaderService);
}
}, MoreExecutors.sameThreadExecutor());
}
public void register(final String name, final PartitionedLeaderService partitionedLeaderService) {
int partition = 0;
for (LeaderService leaderService : partitionedLeaderService.getPartitionLeaderServices()) {
register(String.format("%s-%d", name, partition++), leaderService);
}
}
public void unregister(String name, LeaderService leaderService) {
_selectorMap.remove(name, leaderService);
}
@Override
public void execute(ImmutableMultimap<String, String> parameters, PrintWriter out) throws Exception {
// The 'release' argument tells a server to give up leadership and let a new leader be elected, possibly
// re-electing the current server. This is useful for rebalancing leader-controlled activities.
for (String name : parameters.get("release")) {
LeaderService leaderService = _selectorMap.get(name);
if (leaderService == null) {
out.printf("Unknown leader process: %s%n", name);
continue;
}
Service actualService = leaderService.getCurrentDelegateService().orNull();
if (actualService == null || !actualService.isRunning()) {
out.printf("Process is not currently elected leader: %s%n", name);
continue;
}
_log.warn("Temporarily releasing leadership for process: {}", name);
out.printf("Temporarily releasing leadership for process: %s, cluster will elect a new leader.%n", name);
actualService.stopAndWait();
}
// The 'terminate' argument tells a server to give up leadership permanently (or until the server restarts).
for (String name : parameters.get("terminate")) {
LeaderService leaderService = _selectorMap.get(name);
if (leaderService == null) {
out.printf("Unknown leader process: %s%n", name);
continue;
}
_log.warn("Terminating leader process for: {}", name);
out.printf("Terminating leader process for: %s. Restart the server to restart the leader process.%n", name);
leaderService.stopAndWait();
}
// Print current status.
for (Map.Entry<String, LeaderService> entry : new TreeMap<>(_selectorMap).entrySet()) {
String name = entry.getKey();
LeaderService leaderService = entry.getValue();
out.printf("%s: %s (leader=%s)%n", name,
describeState(leaderService.state(), leaderService.hasLeadership()),
getLeaderId(leaderService));
}
}
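    // Editor's note (illustrative; the admin port and the process name are
    // deployment-specific): the task above is invoked through Dropwizard's
    // admin servlet, e.g.
    //   curl -X POST 'http://localhost:8081/tasks/leader'                    # print status
    //   curl -X POST 'http://localhost:8081/tasks/leader?release=my-task'    # trigger re-election
    //   curl -X POST 'http://localhost:8081/tasks/leader?terminate=my-task'  # stop until server restart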
private String describeState(Service.State state, boolean hasLeadership) {
if (state == Service.State.RUNNING && !hasLeadership) {
return "waiting to win leadership election";
} else {
return state.name();
}
}
private String getLeaderId(LeaderService leaderService) {
try {<|fim▁hole|> return "<unknown>";
}
}
}<|fim▁end|> | return leaderService.getLeader().getId();
} catch (Exception e) { |
<|file_name|>vimeo.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import base64
import functools
import json
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_kwargs,
compat_HTTPError,
compat_str,
compat_urlparse,
)
from ..utils import (
clean_html,
determine_ext,
dict_get,
ExtractorError,
js_to_json,
int_or_none,
merge_dicts,
OnDemandPagedList,
parse_filesize,
RegexNotFoundError,
sanitized_Request,
smuggle_url,
std_headers,
str_or_none,
try_get,
unified_timestamp,
unsmuggle_url,
urlencode_postdata,
urljoin,
unescapeHTML,
)
class VimeoBaseInfoExtractor(InfoExtractor):
_NETRC_MACHINE = 'vimeo'
_LOGIN_REQUIRED = False
_LOGIN_URL = 'https://vimeo.com/log_in'
def _login(self):
username, password = self._get_login_info()
if username is None:
if self._LOGIN_REQUIRED:
raise ExtractorError('No login info available, needed for using %s.' % self.IE_NAME, expected=True)
return
webpage = self._download_webpage(
self._LOGIN_URL, None, 'Downloading login page')
token, vuid = self._extract_xsrft_and_vuid(webpage)
data = {
'action': 'login',
'email': username,
'password': password,
'service': 'vimeo',
'token': token,
}
self._set_vimeo_cookie('vuid', vuid)
try:
self._download_webpage(
self._LOGIN_URL, None, 'Logging in',
data=urlencode_postdata(data), headers={
'Content-Type': 'application/x-www-form-urlencoded',
'Referer': self._LOGIN_URL,
})
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 418:
raise ExtractorError(
'Unable to log in: bad username or password',
expected=True)
raise ExtractorError('Unable to log in')
def _verify_video_password(self, url, video_id, webpage):
password = self._downloader.params.get('videopassword')
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
token, vuid = self._extract_xsrft_and_vuid(webpage)
data = urlencode_postdata({
'password': password,
'token': token,
})
if url.startswith('http://'):
# vimeo only supports https now, but the user can give an http url
url = url.replace('http://', 'https://')
password_request = sanitized_Request(url + '/password', data)
password_request.add_header('Content-Type', 'application/x-www-form-urlencoded')
password_request.add_header('Referer', url)
self._set_vimeo_cookie('vuid', vuid)
return self._download_webpage(
password_request, video_id,
'Verifying the password', 'Wrong password')
def _extract_xsrft_and_vuid(self, webpage):
xsrft = self._search_regex(
r'(?:(?P<q1>["\'])xsrft(?P=q1)\s*:|xsrft\s*[=:])\s*(?P<q>["\'])(?P<xsrft>.+?)(?P=q)',
webpage, 'login token', group='xsrft')
vuid = self._search_regex(
r'["\']vuid["\']\s*:\s*(["\'])(?P<vuid>.+?)\1',
webpage, 'vuid', group='vuid')
return xsrft, vuid
def _extract_vimeo_config(self, webpage, video_id, *args, **kwargs):
vimeo_config = self._search_regex(
r'vimeo\.config\s*=\s*(?:({.+?})|_extend\([^,]+,\s+({.+?})\));',
webpage, 'vimeo config', *args, **compat_kwargs(kwargs))
if vimeo_config:
return self._parse_json(vimeo_config, video_id)
def _set_vimeo_cookie(self, name, value):
self._set_cookie('vimeo.com', name, value)
def _vimeo_sort_formats(self, formats):
        # Bitrates are completely broken. A single m3u8 may contain entries in kbps and bps
        # at the same time without actual units specified. This leads to wrong sorting.
self._sort_formats(formats, field_preference=('preference', 'height', 'width', 'fps', 'tbr', 'format_id'))
def _parse_config(self, config, video_id):
video_data = config['video']
video_title = video_data['title']
live_event = video_data.get('live_event') or {}
is_live = live_event.get('status') == 'started'
formats = []
config_files = video_data.get('files') or config['request'].get('files', {})
for f in config_files.get('progressive', []):
video_url = f.get('url')
if not video_url:
continue
formats.append({
'url': video_url,
'format_id': 'http-%s' % f.get('quality'),
'width': int_or_none(f.get('width')),
'height': int_or_none(f.get('height')),
'fps': int_or_none(f.get('fps')),
'tbr': int_or_none(f.get('bitrate')),
})
# TODO: fix handling of 308 status code returned for live archive manifest requests
for files_type in ('hls', 'dash'):
for cdn_name, cdn_data in config_files.get(files_type, {}).get('cdns', {}).items():
manifest_url = cdn_data.get('url')
if not manifest_url:
continue
format_id = '%s-%s' % (files_type, cdn_name)
if files_type == 'hls':
formats.extend(self._extract_m3u8_formats(
manifest_url, video_id, 'mp4',
'm3u8' if is_live else 'm3u8_native', m3u8_id=format_id,
note='Downloading %s m3u8 information' % cdn_name,
fatal=False))
elif files_type == 'dash':
mpd_pattern = r'/%s/(?:sep/)?video/' % video_id
mpd_manifest_urls = []
if re.search(mpd_pattern, manifest_url):
for suffix, repl in (('', 'video'), ('_sep', 'sep/video')):
mpd_manifest_urls.append((format_id + suffix, re.sub(
mpd_pattern, '/%s/%s/' % (video_id, repl), manifest_url)))
else:
mpd_manifest_urls = [(format_id, manifest_url)]
for f_id, m_url in mpd_manifest_urls:
if 'json=1' in m_url:
real_m_url = (self._download_json(m_url, video_id, fatal=False) or {}).get('url')
if real_m_url:
m_url = real_m_url
mpd_formats = self._extract_mpd_formats(
m_url.replace('/master.json', '/master.mpd'), video_id, f_id,
'Downloading %s MPD information' % cdn_name,
fatal=False)
for f in mpd_formats:
if f.get('vcodec') == 'none':
f['preference'] = -50
elif f.get('acodec') == 'none':
f['preference'] = -40
formats.extend(mpd_formats)
live_archive = live_event.get('archive') or {}
live_archive_source_url = live_archive.get('source_url')
if live_archive_source_url and live_archive.get('status') == 'done':
formats.append({
'format_id': 'live-archive-source',
'url': live_archive_source_url,
'preference': 1,
})
subtitles = {}
text_tracks = config['request'].get('text_tracks')
if text_tracks:
for tt in text_tracks:
subtitles[tt['lang']] = [{
'ext': 'vtt',
'url': urljoin('https://vimeo.com', tt['url']),
}]
thumbnails = []
if not is_live:
for key, thumb in video_data.get('thumbs', {}).items():
thumbnails.append({
'id': key,
'width': int_or_none(key),
'url': thumb,
})
thumbnail = video_data.get('thumbnail')
if thumbnail:
thumbnails.append({
'url': thumbnail,
})
owner = video_data.get('owner') or {}
video_uploader_url = owner.get('url')
return {
'id': str_or_none(video_data.get('id')) or video_id,
'title': self._live_title(video_title) if is_live else video_title,
'uploader': owner.get('name'),
'uploader_id': video_uploader_url.split('/')[-1] if video_uploader_url else None,
'uploader_url': video_uploader_url,
'thumbnails': thumbnails,
'duration': int_or_none(video_data.get('duration')),
'formats': formats,
'subtitles': subtitles,
'is_live': is_live,
}
def _extract_original_format(self, url, video_id):
download_data = self._download_json(
url, video_id, fatal=False,
query={'action': 'load_download_config'},
headers={'X-Requested-With': 'XMLHttpRequest'})
if download_data:
source_file = download_data.get('source_file')
if isinstance(source_file, dict):
download_url = source_file.get('download_url')
if download_url and not source_file.get('is_cold') and not source_file.get('is_defrosting'):
source_name = source_file.get('public_name', 'Original')
if self._is_valid_url(download_url, video_id, '%s video' % source_name):
ext = (try_get(
source_file, lambda x: x['extension'],
compat_str) or determine_ext(
download_url, None) or 'mp4').lower()
return {
'url': download_url,
'ext': ext,
'width': int_or_none(source_file.get('width')),
'height': int_or_none(source_file.get('height')),
'filesize': parse_filesize(source_file.get('size')),
'format_id': source_name,
'preference': 1,
}
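# Editor's note (illustrative; concrete quality and CDN names vary): the helpers
# above produce format_ids of the form 'http-<quality>' for progressive files,
# 'hls-<cdn>' / 'dash-<cdn>' for the manifest-based variants,
# 'live-archive-source' for finished live events, and the download-config source
# name (e.g. 'Original') from _extract_original_format(), so a specific variant
# can be pinned with youtube-dl's -f/--format option.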
class VimeoIE(VimeoBaseInfoExtractor):
"""Information extractor for vimeo.com."""
# _VALID_URL matches Vimeo URLs
_VALID_URL = r'''(?x)
https?://
(?:
(?:
www|
player
)
\.
)?
vimeo(?:pro)?\.com/
(?!(?:channels|album|showcase)/[^/?#]+/?(?:$|[?#])|[^/]+/review/|ondemand/)
(?:.*?/)?
(?:
(?:
play_redirect_hls|
moogaloop\.swf)\?clip_id=
)?
(?:videos?/)?
(?P<id>[0-9]+)
(?:/[\da-f]+)?
/?(?:[?&].*)?(?:[#].*)?$
'''
IE_NAME = 'vimeo'
_TESTS = [
{
'url': 'http://vimeo.com/56015672#at=0',
'md5': '8879b6cc097e987f02484baf890129e5',
'info_dict': {
'id': '56015672',
'ext': 'mp4',
'title': "youtube-dl test video - \u2605 \" ' \u5e78 / \\ \u00e4 \u21ad \U0001d550",
'description': 'md5:2d3305bad981a06ff79f027f19865021',
'timestamp': 1355990239,
'upload_date': '20121220',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user7108434',
'uploader_id': 'user7108434',
'uploader': 'Filippo Valsorda',
'duration': 10,
'license': 'by-sa',
},
'params': {
'format': 'best[protocol=https]',
},
},
{
'url': 'http://vimeopro.com/openstreetmapus/state-of-the-map-us-2013/video/68093876',
'md5': '3b5ca6aa22b60dfeeadf50b72e44ed82',
'note': 'Vimeo Pro video (#1197)',
'info_dict': {
'id': '68093876',
'ext': 'mp4',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/openstreetmapus',
'uploader_id': 'openstreetmapus',
'uploader': 'OpenStreetMap US',
'title': 'Andy Allan - Putting the Carto into OpenStreetMap Cartography',
'description': 'md5:2c362968038d4499f4d79f88458590c1',
'duration': 1595,
'upload_date': '20130610',
'timestamp': 1370893156,
},
'params': {
'format': 'best[protocol=https]',
},
},
{
'url': 'http://player.vimeo.com/video/54469442',
'md5': '619b811a4417aa4abe78dc653becf511',
'note': 'Videos that embed the url in the player page',
'info_dict': {
'id': '54469442',
'ext': 'mp4',
'title': 'Kathy Sierra: Building the minimum Badass User, Business of Software 2012',
'uploader': 'The BLN & Business of Software',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/theblnbusinessofsoftware',
'uploader_id': 'theblnbusinessofsoftware',
'duration': 3610,
'description': None,
},
'params': {
'format': 'best[protocol=https]',
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://vimeo.com/68375962',
'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
'note': 'Video protected with password',
'info_dict': {
'id': '68375962',
'ext': 'mp4',
'title': 'youtube-dl password protected test video',
'timestamp': 1371200155,
'upload_date': '20130614',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
'uploader_id': 'user18948128',
'uploader': 'Jaime Marquínez Ferrándiz',
'duration': 10,
'description': 'md5:dca3ea23adb29ee387127bc4ddfce63f',
},
'params': {
'format': 'best[protocol=https]',
'videopassword': 'youtube-dl',
},
},
{
'url': 'http://vimeo.com/channels/keypeele/75629013',
'md5': '2f86a05afe9d7abc0b9126d229bbe15d',
'info_dict': {
'id': '75629013',
'ext': 'mp4',
'title': 'Key & Peele: Terrorist Interrogation',
'description': 'md5:8678b246399b070816b12313e8b4eb5c',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/atencio',
'uploader_id': 'atencio',
'uploader': 'Peter Atencio',
'channel_id': 'keypeele',
'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/keypeele',
'timestamp': 1380339469,
'upload_date': '20130928',
'duration': 187,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://vimeo.com/76979871',
'note': 'Video with subtitles',
'info_dict': {
'id': '76979871',
'ext': 'mp4',
'title': 'The New Vimeo Player (You Know, For Videos)',
'description': 'md5:2ec900bf97c3f389378a96aee11260ea',
'timestamp': 1381846109,
'upload_date': '20131015',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/staff',
'uploader_id': 'staff',
'uploader': 'Vimeo Staff',
'duration': 62,
}
},
{
# from https://www.ouya.tv/game/Pier-Solar-and-the-Great-Architects/
'url': 'https://player.vimeo.com/video/98044508',
'note': 'The js code contains assignments to the same variable as the config',
'info_dict': {
'id': '98044508',
'ext': 'mp4',
'title': 'Pier Solar OUYA Official Trailer',
'uploader': 'Tulio Gonçalves',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user28849593',
'uploader_id': 'user28849593',
},
},
{
# contains original format
'url': 'https://vimeo.com/33951933',
'md5': '53c688fa95a55bf4b7293d37a89c5c53',
'info_dict': {
'id': '33951933',
'ext': 'mp4',
'title': 'FOX CLASSICS - Forever Classic ID - A Full Minute',
'uploader': 'The DMCI',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/dmci',
'uploader_id': 'dmci',
'timestamp': 1324343742,
'upload_date': '20111220',
'description': 'md5:ae23671e82d05415868f7ad1aec21147',
},
},
{
# only available via https://vimeo.com/channels/tributes/6213729 and
# not via https://vimeo.com/6213729
'url': 'https://vimeo.com/channels/tributes/6213729',
'info_dict': {
'id': '6213729',
'ext': 'mp4',
'title': 'Vimeo Tribute: The Shining',
'uploader': 'Casey Donahue',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/caseydonahue',
'uploader_id': 'caseydonahue',
'channel_url': r're:https?://(?:www\.)?vimeo\.com/channels/tributes',
'channel_id': 'tributes',
'timestamp': 1250886430,
'upload_date': '20090821',
'description': 'md5:bdbf314014e58713e6e5b66eb252f4a6',
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
# redirects to ondemand extractor and should be passed through it
# for successful extraction
'url': 'https://vimeo.com/73445910',
'info_dict': {
'id': '73445910',
'ext': 'mp4',
'title': 'The Reluctant Revolutionary',
'uploader': '10Ft Films',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/tenfootfilms',
'uploader_id': 'tenfootfilms',
'description': 'md5:0fa704e05b04f91f40b7f3ca2e801384',
'upload_date': '20130830',
'timestamp': 1377853339,
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],
},
{
'url': 'http://player.vimeo.com/video/68375962',
'md5': 'aaf896bdb7ddd6476df50007a0ac0ae7',
'info_dict': {
'id': '68375962',
'ext': 'mp4',
'title': 'youtube-dl password protected test video',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/user18948128',
'uploader_id': 'user18948128',
'uploader': 'Jaime Marquínez Ferrándiz',
'duration': 10,
},
'params': {
'format': 'best[protocol=https]',
'videopassword': 'youtube-dl',
},
},
{
'url': 'http://vimeo.com/moogaloop.swf?clip_id=2539741',
'only_matching': True,
},
{
'url': 'https://vimeo.com/109815029',
'note': 'Video not completely processed, "failed" seed status',
'only_matching': True,
},
{
'url': 'https://vimeo.com/groups/travelhd/videos/22439234',
'only_matching': True,
},
{
'url': 'https://vimeo.com/album/2632481/video/79010983',
'only_matching': True,
},
{
# source file returns 403: Forbidden
'url': 'https://vimeo.com/7809605',
'only_matching': True,
},
{
'url': 'https://vimeo.com/160743502/abd0e13fb4',
'only_matching': True,
}
# https://gettingthingsdone.com/workflowmap/
# vimeo embed with check-password page protected by Referer header
]
@staticmethod
def _smuggle_referrer(url, referrer_url):
return smuggle_url(url, {'http_headers': {'Referer': referrer_url}})
@staticmethod
def _extract_urls(url, webpage):
urls = []
# Look for embedded (iframe) Vimeo player
for mobj in re.finditer(
r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//player\.vimeo\.com/video/\d+.*?)\1',
webpage):
urls.append(VimeoIE._smuggle_referrer(unescapeHTML(mobj.group('url')), url))
PLAIN_EMBED_RE = (
# Look for embedded (swf embed) Vimeo player
r'<embed[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/moogaloop\.swf.+?)\1',
# Look more for non-standard embedded Vimeo player
r'<video[^>]+src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?vimeo\.com/[0-9]+)\1',
)
for embed_re in PLAIN_EMBED_RE:
for mobj in re.finditer(embed_re, webpage):
urls.append(mobj.group('url'))
return urls
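    # Editor's illustration (hypothetical markup): _extract_urls() above picks up
    # embeds such as
    #   <iframe src="https://player.vimeo.com/video/76979871?h=abcdef"></iframe>
    # as well as the legacy <embed ...vimeo.com/moogaloop.swf?clip_id=...> and
    # plain <video src="https://vimeo.com/123456"> variants.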
@staticmethod
def _extract_url(url, webpage):
urls = VimeoIE._extract_urls(url, webpage)
return urls[0] if urls else None
def _verify_player_video_password(self, url, video_id, headers):
password = self._downloader.params.get('videopassword')
if password is None:
raise ExtractorError('This video is protected by a password, use the --video-password option', expected=True)
data = urlencode_postdata({
'password': base64.b64encode(password.encode()),
})
headers = merge_dicts(headers, {
'Content-Type': 'application/x-www-form-urlencoded',
})
checked = self._download_json(
url + '/check-password', video_id,
'Verifying the password', data=data, headers=headers)
if checked is False:
raise ExtractorError('Wrong video password', expected=True)
return checked
def _real_initialize(self):
self._login()
def _real_extract(self, url):
url, data = unsmuggle_url(url, {})
headers = std_headers.copy()
if 'http_headers' in data:
headers.update(data['http_headers'])
if 'Referer' not in headers:
headers['Referer'] = url
channel_id = self._search_regex(
r'vimeo\.com/channels/([^/]+)', url, 'channel id', default=None)
# Extract ID from URL
video_id = self._match_id(url)
orig_url = url
is_pro = 'vimeopro.com/' in url
is_player = '://player.vimeo.com/video/' in url
if is_pro:
# some videos require portfolio_id to be present in player url
# https://github.com/ytdl-org/youtube-dl/issues/20070
url = self._extract_url(url, self._download_webpage(url, video_id))
if not url:
url = 'https://vimeo.com/' + video_id
elif is_player:
url = 'https://player.vimeo.com/video/' + video_id
elif any(p in url for p in ('play_redirect_hls', 'moogaloop.swf')):
url = 'https://vimeo.com/' + video_id
try:
# Retrieve video webpage to extract further information
webpage, urlh = self._download_webpage_handle(
url, video_id, headers=headers)
redirect_url = urlh.geturl()
except ExtractorError as ee:
if isinstance(ee.cause, compat_HTTPError) and ee.cause.code == 403:
errmsg = ee.cause.read()
if b'Because of its privacy settings, this video cannot be played here' in errmsg:
raise ExtractorError(
'Cannot download embed-only video without embedding '
'URL. Please call youtube-dl with the URL of the page '
'that embeds this video.',
expected=True)
raise
# Now we begin extracting as much information as we can from what we
# retrieved. First we extract the information common to all extractors,
        # and later we extract those that are Vimeo-specific.
self.report_extraction(video_id)
vimeo_config = self._extract_vimeo_config(webpage, video_id, default=None)
if vimeo_config:
seed_status = vimeo_config.get('seed_status', {})
if seed_status.get('state') == 'failed':
raise ExtractorError(
'%s said: %s' % (self.IE_NAME, seed_status['title']),
expected=True)
cc_license = None
timestamp = None
video_description = None
# Extract the config JSON
try:
try:
config_url = self._html_search_regex(
r' data-config-url="(.+?)"', webpage,
'config URL', default=None)
if not config_url:
                    # Sometimes a new React-based page is served instead of the old one, which
                    # requires a different config URL extraction approach (see
# https://github.com/ytdl-org/youtube-dl/pull/7209)
page_config = self._parse_json(self._search_regex(
r'vimeo\.(?:clip|vod_title)_page_config\s*=\s*({.+?});',
webpage, 'page config'), video_id)
config_url = page_config['player']['config_url']
cc_license = page_config.get('cc_license')
timestamp = try_get(
page_config, lambda x: x['clip']['uploaded_on'],
compat_str)
video_description = clean_html(dict_get(
page_config, ('description', 'description_html_escaped')))
config = self._download_json(config_url, video_id)
except RegexNotFoundError:
# For pro videos or player.vimeo.com urls
                # We try to find out which variable the config dict is assigned to
m_variable_name = re.search(r'(\w)\.video\.id', webpage)
if m_variable_name is not None:
config_re = [r'%s=({[^}].+?});' % re.escape(m_variable_name.group(1))]
else:
config_re = [r' = {config:({.+?}),assets:', r'(?:[abc])=({.+?});']
config_re.append(r'\bvar\s+r\s*=\s*({.+?})\s*;')
config_re.append(r'\bconfig\s*=\s*({.+?})\s*;')
config = self._search_regex(config_re, webpage, 'info section',
flags=re.DOTALL)
config = json.loads(config)
except Exception as e:
if re.search('The creator of this video has not given you permission to embed it on this domain.', webpage):
raise ExtractorError('The author has restricted the access to this video, try with the "--referer" option')
if re.search(r'<form[^>]+?id="pw_form"', webpage) is not None:
if '_video_password_verified' in data:
raise ExtractorError('video password verification failed!')
self._verify_video_password(redirect_url, video_id, webpage)
return self._real_extract(
smuggle_url(redirect_url, {'_video_password_verified': 'verified'}))
else:
raise ExtractorError('Unable to extract info section',
cause=e)
else:
if config.get('view') == 4:
config = self._verify_player_video_password(redirect_url, video_id, headers)
vod = config.get('video', {}).get('vod', {})
def is_rented():
if '>You rented this title.<' in webpage:
return True
if config.get('user', {}).get('purchased'):
return True
for purchase_option in vod.get('purchase_options', []):
if purchase_option.get('purchased'):
return True
label = purchase_option.get('label_string')
if label and (label.startswith('You rented this') or label.endswith(' remaining')):
return True
return False
if is_rented() and vod.get('is_trailer'):
feature_id = vod.get('feature_id')
if feature_id and not data.get('force_feature_id', False):
return self.url_result(smuggle_url(
'https://player.vimeo.com/player/%s' % feature_id,
{'force_feature_id': True}), 'Vimeo')
# Extract video description
if not video_description:
video_description = self._html_search_regex(
r'(?s)<div\s+class="[^"]*description[^"]*"[^>]*>(.*?)</div>',
webpage, 'description', default=None)
if not video_description:
video_description = self._html_search_meta(
'description', webpage, default=None)
if not video_description and is_pro:
orig_webpage = self._download_webpage(
orig_url, video_id,
note='Downloading webpage for description',
fatal=False)
if orig_webpage:
video_description = self._html_search_meta(
'description', orig_webpage, default=None)
if not video_description and not is_player:
self._downloader.report_warning('Cannot find video description')
# Extract upload date
if not timestamp:
timestamp = self._search_regex(
r'<time[^>]+datetime="([^"]+)"', webpage,
'timestamp', default=None)
try:
view_count = int(self._search_regex(r'UserPlays:(\d+)', webpage, 'view count'))
like_count = int(self._search_regex(r'UserLikes:(\d+)', webpage, 'like count'))
comment_count = int(self._search_regex(r'UserComments:(\d+)', webpage, 'comment count'))
except RegexNotFoundError:
# This info is only available in vimeo.com/{id} urls
view_count = None
like_count = None
comment_count = None
formats = []
source_format = self._extract_original_format(
'https://vimeo.com/' + video_id, video_id)
if source_format:
formats.append(source_format)
info_dict_config = self._parse_config(config, video_id)
formats.extend(info_dict_config['formats'])
self._vimeo_sort_formats(formats)
json_ld = self._search_json_ld(webpage, video_id, default={})
if not cc_license:
cc_license = self._search_regex(
r'<link[^>]+rel=["\']license["\'][^>]+href=(["\'])(?P<license>(?:(?!\1).)+)\1',
webpage, 'license', default=None, group='license')
channel_url = 'https://vimeo.com/channels/%s' % channel_id if channel_id else None
info_dict = {
'formats': formats,
'timestamp': unified_timestamp(timestamp),
'description': video_description,
'webpage_url': url,
'view_count': view_count,
'like_count': like_count,
'comment_count': comment_count,
'license': cc_license,
'channel_id': channel_id,
'channel_url': channel_url,
}
info_dict = merge_dicts(info_dict, info_dict_config, json_ld)
return info_dict
class VimeoOndemandIE(VimeoIE):
IE_NAME = 'vimeo:ondemand'
_VALID_URL = r'https?://(?:www\.)?vimeo\.com/ondemand/([^/]+/)?(?P<id>[^/?#&]+)'
_TESTS = [{
# ondemand video not available via https://vimeo.com/id
'url': 'https://vimeo.com/ondemand/20704',
'md5': 'c424deda8c7f73c1dfb3edd7630e2f35',
'info_dict': {
'id': '105442900',
'ext': 'mp4',
'title': 'המעבדה - במאי יותם פלדמן',
'uploader': 'גם סרטים',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/gumfilms',
'uploader_id': 'gumfilms',
'description': 'md5:4c027c965e439de4baab621e48b60791',
'upload_date': '20140906',
'timestamp': 1410032453,
},
'params': {
'format': 'best[protocol=https]',
},
'expected_warnings': ['Unable to download JSON metadata'],
}, {
# requires Referer to be passed along with og:video:url
'url': 'https://vimeo.com/ondemand/36938/126682985',
'info_dict': {
'id': '126584684',
'ext': 'mp4',
'title': 'Rävlock, rätt läte på rätt plats',
'uploader': 'Lindroth & Norin',
'uploader_url': r're:https?://(?:www\.)?vimeo\.com/lindrothnorin',
'uploader_id': 'lindrothnorin',
'description': 'md5:c3c46a90529612c8279fb6af803fc0df',
'upload_date': '20150502',
'timestamp': 1430586422,
},
'params': {
'skip_download': True,
},
'expected_warnings': ['Unable to download JSON metadata'],<|fim▁hole|> 'only_matching': True,
}, {
'url': 'https://vimeo.com/ondemand/141692381',
'only_matching': True,
}, {
'url': 'https://vimeo.com/ondemand/thelastcolony/150274832',
'only_matching': True,
}]
class VimeoChannelIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:channel'
_VALID_URL = r'https://vimeo\.com/channels/(?P<id>[^/?#]+)/?(?:$|[?#])'
_MORE_PAGES_INDICATOR = r'<a.+?rel="next"'
_TITLE = None
_TITLE_RE = r'<link rel="alternate"[^>]+?title="(.*?)"'
_TESTS = [{
'url': 'https://vimeo.com/channels/tributes',
'info_dict': {
'id': 'tributes',
'title': 'Vimeo Tributes',
},
'playlist_mincount': 25,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/channels/%s'
def _page_url(self, base_url, pagenum):
return '%s/videos/page:%d/' % (base_url, pagenum)
def _extract_list_title(self, webpage):
return self._TITLE or self._html_search_regex(
self._TITLE_RE, webpage, 'list title', fatal=False)
def _title_and_entries(self, list_id, base_url):
for pagenum in itertools.count(1):
page_url = self._page_url(base_url, pagenum)
webpage = self._download_webpage(
page_url, list_id,
'Downloading page %s' % pagenum)
if pagenum == 1:
yield self._extract_list_title(webpage)
# Try extracting href first since not all videos are available via
# short https://vimeo.com/id URL (e.g. https://vimeo.com/channels/tributes/6213729)
clips = re.findall(
r'id="clip_(\d+)"[^>]*>\s*<a[^>]+href="(/(?:[^/]+/)*\1)(?:[^>]+\btitle="([^"]+)")?', webpage)
if clips:
for video_id, video_url, video_title in clips:
yield self.url_result(
compat_urlparse.urljoin(base_url, video_url),
VimeoIE.ie_key(), video_id=video_id, video_title=video_title)
# More relaxed fallback
else:
for video_id in re.findall(r'id=["\']clip_(\d+)', webpage):
yield self.url_result(
'https://vimeo.com/%s' % video_id,
VimeoIE.ie_key(), video_id=video_id)
if re.search(self._MORE_PAGES_INDICATOR, webpage, re.DOTALL) is None:
break
def _extract_videos(self, list_id, base_url):
title_and_entries = self._title_and_entries(list_id, base_url)
list_title = next(title_and_entries)
return self.playlist_result(title_and_entries, list_id, list_title)
def _real_extract(self, url):
channel_id = self._match_id(url)
return self._extract_videos(channel_id, self._BASE_URL_TEMPL % channel_id)
class VimeoUserIE(VimeoChannelIE):
IE_NAME = 'vimeo:user'
_VALID_URL = r'https://vimeo\.com/(?!(?:[0-9]+|watchlater)(?:$|[?#/]))(?P<id>[^/]+)(?:/videos|[#?]|$)'
_TITLE_RE = r'<a[^>]+?class="user">([^<>]+?)</a>'
_TESTS = [{
'url': 'https://vimeo.com/nkistudio/videos',
'info_dict': {
'title': 'Nki',
'id': 'nkistudio',
},
'playlist_mincount': 66,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/%s'
class VimeoAlbumIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:album'
_VALID_URL = r'https://vimeo\.com/(?:album|showcase)/(?P<id>\d+)(?:$|[?#]|/(?!video))'
_TITLE_RE = r'<header id="page_header">\n\s*<h1>(.*?)</h1>'
_TESTS = [{
'url': 'https://vimeo.com/album/2632481',
'info_dict': {
'id': '2632481',
'title': 'Staff Favorites: November 2013',
},
'playlist_mincount': 13,
}, {
'note': 'Password-protected album',
'url': 'https://vimeo.com/album/3253534',
'info_dict': {
'title': 'test',
'id': '3253534',
},
'playlist_count': 1,
'params': {
'videopassword': 'youtube-dl',
}
}]
_PAGE_SIZE = 100
def _fetch_page(self, album_id, authorization, hashed_pass, page):
api_page = page + 1
query = {
'fields': 'link,uri',
'page': api_page,
'per_page': self._PAGE_SIZE,
}
if hashed_pass:
query['_hashed_pass'] = hashed_pass
videos = self._download_json(
'https://api.vimeo.com/albums/%s/videos' % album_id,
album_id, 'Downloading page %d' % api_page, query=query, headers={
'Authorization': 'jwt ' + authorization,
})['data']
for video in videos:
link = video.get('link')
if not link:
continue
uri = video.get('uri')
video_id = self._search_regex(r'/videos/(\d+)', uri, 'video_id', default=None) if uri else None
yield self.url_result(link, VimeoIE.ie_key(), video_id)
def _real_extract(self, url):
album_id = self._match_id(url)
webpage = self._download_webpage(url, album_id)
viewer = self._parse_json(self._search_regex(
r'bootstrap_data\s*=\s*({.+?})</script>',
webpage, 'bootstrap data'), album_id)['viewer']
jwt = viewer['jwt']
album = self._download_json(
'https://api.vimeo.com/albums/' + album_id,
album_id, headers={'Authorization': 'jwt ' + jwt},
query={'fields': 'description,name,privacy'})
hashed_pass = None
if try_get(album, lambda x: x['privacy']['view']) == 'password':
password = self._downloader.params.get('videopassword')
if not password:
raise ExtractorError(
'This album is protected by a password, use the --video-password option',
expected=True)
self._set_vimeo_cookie('vuid', viewer['vuid'])
try:
hashed_pass = self._download_json(
'https://vimeo.com/showcase/%s/auth' % album_id,
album_id, 'Verifying the password', data=urlencode_postdata({
'password': password,
'token': viewer['xsrft'],
}), headers={
'X-Requested-With': 'XMLHttpRequest',
})['hashed_pass']
except ExtractorError as e:
if isinstance(e.cause, compat_HTTPError) and e.cause.code == 401:
raise ExtractorError('Wrong password', expected=True)
raise
entries = OnDemandPagedList(functools.partial(
self._fetch_page, album_id, jwt, hashed_pass), self._PAGE_SIZE)
return self.playlist_result(
entries, album_id, album.get('name'), album.get('description'))
class VimeoGroupsIE(VimeoChannelIE):
IE_NAME = 'vimeo:group'
_VALID_URL = r'https://vimeo\.com/groups/(?P<id>[^/]+)(?:/(?!videos?/\d+)|$)'
_TESTS = [{
'url': 'https://vimeo.com/groups/kattykay',
'info_dict': {
'id': 'kattykay',
'title': 'Katty Kay',
},
'playlist_mincount': 27,
}]
_BASE_URL_TEMPL = 'https://vimeo.com/groups/%s'
class VimeoReviewIE(VimeoBaseInfoExtractor):
IE_NAME = 'vimeo:review'
IE_DESC = 'Review pages on vimeo'
_VALID_URL = r'(?P<url>https://vimeo\.com/[^/]+/review/(?P<id>[^/]+)/[0-9a-f]{10})'
_TESTS = [{
'url': 'https://vimeo.com/user21297594/review/75524534/3c257a1b5d',
'md5': 'c507a72f780cacc12b2248bb4006d253',
'info_dict': {
'id': '75524534',
'ext': 'mp4',
'title': "DICK HARDWICK 'Comedian'",
'uploader': 'Richard Hardwick',
'uploader_id': 'user21297594',
'description': "Comedian Dick Hardwick's five minute demo filmed in front of a live theater audience.\nEdit by Doug Mattocks",
},
'expected_warnings': ['Unable to download JSON metadata'],
}, {
'note': 'video player needs Referer',
'url': 'https://vimeo.com/user22258446/review/91613211/13f927e053',
'md5': '6295fdab8f4bf6a002d058b2c6dce276',
'info_dict': {
'id': '91613211',
'ext': 'mp4',
'title': 're:(?i)^Death by dogma versus assembling agile . Sander Hoogendoorn',
'uploader': 'DevWeek Events',
'duration': 2773,
'thumbnail': r're:^https?://.*\.jpg$',
'uploader_id': 'user22258446',
},
'skip': 'video gone',
}, {
'note': 'Password protected',
'url': 'https://vimeo.com/user37284429/review/138823582/c4d865efde',
'info_dict': {
'id': '138823582',
'ext': 'mp4',
'title': 'EFFICIENT PICKUP MASTERCLASS MODULE 1',
'uploader': 'TMB',
'uploader_id': 'user37284429',
},
'params': {
'videopassword': 'holygrail',
},
'skip': 'video gone',
}]
def _real_initialize(self):
self._login()
def _real_extract(self, url):
page_url, video_id = re.match(self._VALID_URL, url).groups()
clip_data = self._download_json(
page_url.replace('/review/', '/review/data/'),
video_id)['clipData']
config_url = clip_data['configUrl']
config = self._download_json(config_url, video_id)
info_dict = self._parse_config(config, video_id)
source_format = self._extract_original_format(
page_url + '/action', video_id)
if source_format:
info_dict['formats'].append(source_format)
self._vimeo_sort_formats(info_dict['formats'])
info_dict['description'] = clean_html(clip_data.get('description'))
return info_dict
class VimeoWatchLaterIE(VimeoChannelIE):
IE_NAME = 'vimeo:watchlater'
IE_DESC = 'Vimeo watch later list, "vimeowatchlater" keyword (requires authentication)'
_VALID_URL = r'https://vimeo\.com/(?:home/)?watchlater|:vimeowatchlater'
_TITLE = 'Watch Later'
_LOGIN_REQUIRED = True
_TESTS = [{
'url': 'https://vimeo.com/watchlater',
'only_matching': True,
}]
def _real_initialize(self):
self._login()
def _page_url(self, base_url, pagenum):
url = '%s/page:%d/' % (base_url, pagenum)
request = sanitized_Request(url)
# Set the header to get a partial html page with the ids,
# the normal page doesn't contain them.
request.add_header('X-Requested-With', 'XMLHttpRequest')
return request
def _real_extract(self, url):
return self._extract_videos('watchlater', 'https://vimeo.com/watchlater')
class VimeoLikesIE(VimeoChannelIE):
_VALID_URL = r'https://(?:www\.)?vimeo\.com/(?P<id>[^/]+)/likes/?(?:$|[?#]|sort:)'
IE_NAME = 'vimeo:likes'
IE_DESC = 'Vimeo user likes'
_TESTS = [{
'url': 'https://vimeo.com/user755559/likes/',
'playlist_mincount': 293,
'info_dict': {
'id': 'user755559',
'title': 'urza’s Likes',
},
}, {
'url': 'https://vimeo.com/stormlapse/likes',
'only_matching': True,
}]
def _page_url(self, base_url, pagenum):
return '%s/page:%d/' % (base_url, pagenum)
def _real_extract(self, url):
user_id = self._match_id(url)
return self._extract_videos(user_id, 'https://vimeo.com/%s/likes' % user_id)
class VHXEmbedIE(VimeoBaseInfoExtractor):
IE_NAME = 'vhx:embed'
_VALID_URL = r'https?://embed\.vhx\.tv/videos/(?P<id>\d+)'
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
config_url = self._parse_json(self._search_regex(
r'window\.OTTData\s*=\s*({.+})', webpage,
'ott data'), video_id, js_to_json)['config_url']
config = self._download_json(config_url, video_id)
info = self._parse_config(config, video_id)
self._vimeo_sort_formats(info['formats'])
return info<|fim▁end|> | }, {
'url': 'https://vimeo.com/ondemand/nazmaalik', |
<|file_name|>vif_driver.py<|end_file_name|><|fim▁begin|>from abc import abstractmethod
class VIFDriver(object):
@abstractmethod
def after_device_destroy(self, environ, domxml):
return domxml
<|fim▁hole|> @abstractmethod
def after_network_setup(self, environ, json_content):
return json_content
@abstractmethod
def after_nic_hotplug(self, environ, domxml):
return domxml
@abstractmethod
def after_nic_unplug(self, environ, domxml):
return domxml
@abstractmethod
def after_get_caps(self, environ, json_content):
return json_content
@abstractmethod
def after_get_stats(self, environ, json_content):
return json_content
@abstractmethod
def after_vm_start(self, environ, domxml):
return domxml
def after_migration_source(self, environ, domxml):
return domxml
def after_migration_destination(self, environ, domxml):
return domxml
@abstractmethod
def before_get_caps(self, environ, json_content):
return json_content
@abstractmethod
def before_get_stats(self, environ, json_content):
return json_content
@abstractmethod
def before_nic_hotplug(self, environ, domxml):
return domxml
@abstractmethod
def before_nic_unplug(self, environ, domxml):
return domxml
@abstractmethod
def before_device_create(self, environ, domxml):
return domxml
@abstractmethod
def before_device_destroy(self, environ, domxml):
return domxml
@abstractmethod
def before_migration_source(self, environ, domxml):
return domxml
@abstractmethod
def before_migration_destination(self, environ, domxml):
return domxml
@abstractmethod
def before_network_setup(self, environ, json_content):
return json_content
@abstractmethod
def before_vm_start(self, environ, domxml):
return domxml<|fim▁end|> | @abstractmethod
def after_device_create(self, environ, domxml):
return domxml
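# --- Illustrative sketch (not part of the original module) ---
# The VIFDriver class above is a hook interface: a concrete driver subclasses
# it and overrides only the hooks it cares about, always returning the
# (possibly modified) payload. Since no ABC metaclass is used, the
# @abstractmethod decorators are not enforced, so a partial override works.
# The driver name and logging call below are assumptions for illustration only.
import logging


class NoopVIFDriver(VIFDriver):
    """Pass-through driver that logs a hook call and returns the payload unchanged."""

    def before_nic_hotplug(self, environ, domxml):
        logging.debug("before_nic_hotplug invoked")
        return domxml

    def after_nic_hotplug(self, environ, domxml):
        return domxml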
|
<|file_name|>Rect.js<|end_file_name|><|fim▁begin|>import PropTypes from 'prop-types';
import Element from './Element';
export default class Rect extends Element {
static displayName = 'Rect';
static propTypes = {
x: PropTypes.oneOfType([PropTypes.func, PropTypes.number]),
y: PropTypes.oneOfType([PropTypes.func, PropTypes.number]),
height: PropTypes.oneOfType([PropTypes.func, PropTypes.number]),
width: PropTypes.oneOfType([PropTypes.func, PropTypes.number]),
};
static defaultProps = {
...Element.defaultProps,
};
defaultComponent='rect'<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>_reference_data_sets_operations.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ReferenceDataSetsOperations:
"""ReferenceDataSetsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.timeseriesinsights.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def create_or_update(
self,
resource_group_name: str,
environment_name: str,
reference_data_set_name: str,
parameters: "_models.ReferenceDataSetCreateOrUpdateParameters",
**kwargs
) -> "_models.ReferenceDataSetResource":
"""Create or update a reference data set in the specified environment.
:param resource_group_name: Name of an Azure Resource group.
:type resource_group_name: str
:param environment_name: The name of the Time Series Insights environment associated with the
specified resource group.
:type environment_name: str
:param reference_data_set_name: Name of the reference data set.
:type reference_data_set_name: str
:param parameters: Parameters for creating a reference data set.
:type parameters: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetCreateOrUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ReferenceDataSetResource, or the result of cls(response)
:rtype: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReferenceDataSetResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'environmentName': self._serialize.url("environment_name", environment_name, 'str'),
'referenceDataSetName': self._serialize.url("reference_data_set_name", reference_data_set_name, 'str', max_length=63, min_length=3, pattern=r'^[A-Za-z0-9]'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ReferenceDataSetCreateOrUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ReferenceDataSetResource', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ReferenceDataSetResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets/{referenceDataSetName}'} # type: ignore
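# --- Illustrative usage sketch (not part of the generated client) ---
# These operations are normally reached through the service client rather than
# by instantiating this class directly. The client, credential and attribute
# names below are assumptions based on the usual azure-mgmt packaging; adjust
# them to the SDK version actually installed.
#
#   from azure.identity.aio import DefaultAzureCredential
#   from azure.mgmt.timeseriesinsights.aio import TimeSeriesInsightsClient
#
#   async def show_reference_data_set():
#       async with TimeSeriesInsightsClient(DefaultAzureCredential(), "<subscription-id>") as client:
#           data_set = await client.reference_data_sets.get(
#               resource_group_name="my-rg",
#               environment_name="my-environment",
#               reference_data_set_name="my-reference-data",
#           )
#           print(data_set.name)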
async def get(
self,
resource_group_name: str,
environment_name: str,
reference_data_set_name: str,
**kwargs
) -> "_models.ReferenceDataSetResource":
"""Gets the reference data set with the specified name in the specified environment.
:param resource_group_name: Name of an Azure Resource group.
:type resource_group_name: str
:param environment_name: The name of the Time Series Insights environment associated with the
specified resource group.
:type environment_name: str
:param reference_data_set_name: The name of the Time Series Insights reference data set
associated with the specified environment.
:type reference_data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ReferenceDataSetResource, or the result of cls(response)
:rtype: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReferenceDataSetResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-15"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'environmentName': self._serialize.url("environment_name", environment_name, 'str'),
'referenceDataSetName': self._serialize.url("reference_data_set_name", reference_data_set_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ReferenceDataSetResource', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets/{referenceDataSetName}'} # type: ignore
async def update(
self,
resource_group_name: str,
environment_name: str,
reference_data_set_name: str,
reference_data_set_update_parameters: "_models.ReferenceDataSetUpdateParameters",
**kwargs
) -> "_models.ReferenceDataSetResource":
"""Updates the reference data set with the specified name in the specified subscription, resource
group, and environment.
:param resource_group_name: Name of an Azure Resource group.
:type resource_group_name: str
:param environment_name: The name of the Time Series Insights environment associated with the
specified resource group.
:type environment_name: str
:param reference_data_set_name: The name of the Time Series Insights reference data set
associated with the specified environment.
:type reference_data_set_name: str
:param reference_data_set_update_parameters: Request object that contains the updated
information for the reference data set.
:type reference_data_set_update_parameters: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ReferenceDataSetResource, or the result of cls(response)
:rtype: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetResource
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReferenceDataSetResource"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'environmentName': self._serialize.url("environment_name", environment_name, 'str'),
'referenceDataSetName': self._serialize.url("reference_data_set_name", reference_data_set_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(reference_data_set_update_parameters, 'ReferenceDataSetUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ReferenceDataSetResource', pipeline_response)
if cls:<|fim▁hole|> update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets/{referenceDataSetName}'} # type: ignore
async def delete(
self,
resource_group_name: str,
environment_name: str,
reference_data_set_name: str,
**kwargs
) -> None:
"""Deletes the reference data set with the specified name in the specified subscription, resource
group, and environment.
:param resource_group_name: Name of an Azure Resource group.
:type resource_group_name: str
:param environment_name: The name of the Time Series Insights environment associated with the
specified resource group.
:type environment_name: str
:param reference_data_set_name: The name of the Time Series Insights reference data set
associated with the specified environment.
:type reference_data_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-15"
accept = "application/json"
# Construct URL
url = self.delete.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'environmentName': self._serialize.url("environment_name", environment_name, 'str'),
'referenceDataSetName': self._serialize.url("reference_data_set_name", reference_data_set_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets/{referenceDataSetName}'} # type: ignore
async def list_by_environment(
self,
resource_group_name: str,
environment_name: str,
**kwargs
) -> "_models.ReferenceDataSetListResponse":
"""Lists all the available reference data sets associated with the subscription and within the
specified resource group and environment.
:param resource_group_name: Name of an Azure Resource group.
:type resource_group_name: str
:param environment_name: The name of the Time Series Insights environment associated with the
specified resource group.
:type environment_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ReferenceDataSetListResponse, or the result of cls(response)
:rtype: ~azure.mgmt.timeseriesinsights.models.ReferenceDataSetListResponse
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReferenceDataSetListResponse"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-05-15"
accept = "application/json"
# Construct URL
url = self.list_by_environment.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'environmentName': self._serialize.url("environment_name", environment_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ReferenceDataSetListResponse', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_by_environment.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.TimeSeriesInsights/environments/{environmentName}/referenceDataSets'} # type: ignore<|fim▁end|> | return cls(pipeline_response, deserialized, {})
return deserialized |
<|file_name|>datasource.test.js<|end_file_name|><|fim▁begin|>"use strict";
var mapnik = require('../');
var assert = require('assert');
var path = require('path');
var fs = require('fs');
mapnik.register_datasource(path.join(mapnik.settings.paths.input_plugins,'geojson.input'));
mapnik.register_datasource(path.join(mapnik.settings.paths.input_plugins,'ogr.input'));
mapnik.register_datasource(path.join(mapnik.settings.paths.input_plugins,'shape.input'));
mapnik.register_datasource(path.join(mapnik.settings.paths.input_plugins,'gdal.input'));
describe('mapnik.Datasource', function() {
it('should throw with invalid usage', function() {
assert.throws(function() { mapnik.Datasource('foo'); });
assert.throws(function() { mapnik.Datasource({ 'foo': 1 }); });
assert.throws(function() { mapnik.Datasource({ 'type': 'foo' }); });
assert.throws(function() { mapnik.Datasource({ 'type': 'shape' }); });<|fim▁hole|> assert.throws(function() { new mapnik.Datasource('foo'); },
/Must provide an object, eg \{type: 'shape', file : 'world.shp'\}/);
assert.throws(function() { new mapnik.Datasource(); });
assert.throws(function() { new mapnik.Datasource({ 'foo': 1 }); });
assert.throws(function() { new mapnik.Datasource({ 'type': 'foo' }); });
assert.throws(function() { new mapnik.Datasource({ 'type': 'shape' }); },
/Shape Plugin: missing <file> parameter/);
});
it('should validate with known shapefile - ogr', function() {
var options = {
type: 'ogr',
file: './test/data/world_merc.shp',
layer: 'world_merc'
};
var ds = new mapnik.Datasource(options);
assert.ok(ds);
assert.deepEqual(ds.parameters(), options);
var features = [];
var featureset = ds.featureset();
var feature;
while ((feature = featureset.next())) {
features.push(feature);
}
assert.equal(features.length, 245);
assert.deepEqual(features[244].attributes(), {
AREA: 1638094,
FIPS: 'RS',
ISO2: 'RU',
ISO3: 'RUS',
LAT: 61.988,
LON: 96.689,
NAME: 'Russia',
POP2005: 143953092,
REGION: 150,
SUBREGION: 151,
UN: 643
});
var expected = {
type: 'vector',
extent: [
-20037508.342789248,
-8283343.693882697,
20037508.342789244,
18365151.363070473
],
encoding: 'utf-8',
fields: {
FIPS: 'String',
ISO2: 'String',
ISO3: 'String',
UN: 'Number',
NAME: 'String',
AREA: 'Number',
POP2005: 'Number',
REGION: 'Number',
SUBREGION: 'Number',
LON: 'Number',
LAT: 'Number'
},
geometry_type: 'polygon',
proj4:'+proj=merc +lon_0=0 +lat_ts=0 +x_0=0 +y_0=0 +datum=WGS84 +units=m +no_defs'
};
var actual = ds.describe();
assert.equal(actual.proj4, expected.proj4);
assert.deepEqual(actual.type, expected.type);
assert.deepEqual(actual.encoding, expected.encoding);
assert.deepEqual(actual.fields, expected.fields);
assert.deepEqual(actual.geometry_type, expected.geometry_type);
assert.deepEqual(ds.extent(), expected.extent);
assert.deepEqual(ds.fields(), expected.fields);
});
it('should validate with known shapefile', function() {
var options = {
type: 'shape',
file: './test/data/world_merc.shp'
};
var ds = new mapnik.Datasource(options);
assert.ok(ds);
assert.deepEqual(ds.parameters(), options);
var features = [];
var featureset = ds.featureset();
var feature;
while ((feature = featureset.next())) {
features.push(feature);
}
assert.equal(features.length, 245);
assert.deepEqual(features[244].attributes(), {
AREA: 1638094,
FIPS: 'RS',
ISO2: 'RU',
ISO3: 'RUS',
LAT: 61.988,
LON: 96.689,
NAME: 'Russia',
POP2005: 143953092,
REGION: 150,
SUBREGION: 151,
UN: 643
});
var expected = {
type: 'vector',
extent: [
-20037508.342789248,
-8283343.693882697,
20037508.342789244,
18365151.363070473
],
encoding: 'utf-8',
fields: {
FIPS: 'String',
ISO2: 'String',
ISO3: 'String',
UN: 'Number',
NAME: 'String',
AREA: 'Number',
POP2005: 'Number',
REGION: 'Number',
SUBREGION: 'Number',
LON: 'Number',
LAT: 'Number'
},
geometry_type: 'polygon'
};
var actual = ds.describe();
assert.deepEqual(actual.type, expected.type);
assert.deepEqual(actual.encoding, expected.encoding);
assert.deepEqual(actual.fields, expected.fields);
assert.deepEqual(actual.geometry_type, expected.geometry_type);
assert.deepEqual(ds.extent(), expected.extent);
assert.deepEqual(ds.fields(), expected.fields);
});
it('test invalid use of memory datasource', function() {
var ds = new mapnik.MemoryDatasource({'extent': '-180,-90,180,90'});
assert.throws(function() { ds.add(); });
assert.throws(function() { ds.add(null); });
assert.throws(function() { ds.add({}, null); });
assert.throws(function() { ds.add({'wkt': '1234'}); });
assert.equal(false, ds.add({}));
});
it('test empty memory datasource', function() {
var ds = new mapnik.MemoryDatasource({'extent': '-180,-90,180,90'});
var empty_fs = ds.featureset();
assert.equal(typeof(empty_fs),'undefined');
assert.equal(empty_fs, null);
});
it('test empty geojson datasource', function() {
var input = {
"type": "Feature",
"properties": {
"something": []
},
"geometry": {
"type": "Point",
"coordinates": [ 1, 1 ]
}
};
var ds = new mapnik.Datasource({ type:'geojson', inline: JSON.stringify(input) });
var fs = ds.featureset();
var feat = fs.next();
var feature = JSON.parse(feat.toJSON());
// pass invalid extent to filter all features out
// resulting in empty featureset that should be returned
// as a null object
var empty_fs = ds.featureset({extent:[-1,-1,0,0]});
assert.equal(typeof(empty_fs),'undefined');
assert.equal(empty_fs, null);
});
it('test empty geojson datasource due to invalid json string', function() {
var input = "{ \"type\": \"FeatureCollection\", \"features\": [{ \"oofda\" } ] }";
// from string will fail to parse
assert.throws(function() { new mapnik.Datasource({ type:'geojson', inline: input, cache_features: false }); });
assert.throws(function() { new mapnik.Datasource({ type:'geojson', inline: fs.readFileSync('./test/data/parse.error.json').toString(), cache_features: false }); });
});
it('test empty geojson datasource due to invalid json file', function() {
assert.throws(function() { new mapnik.Datasource({ type:'geojson', file: './test/data/parse.error.json', cache_features: true }); });
});
it('test valid use of memory datasource', function() {
var ds = new mapnik.MemoryDatasource({'extent': '-180,-90,180,90'});
assert.equal(true, ds.add({ 'x': 0, 'y': 0 }));
assert.equal(true, ds.add({ 'x': 0.23432, 'y': 0.234234 }));
assert.equal(true, ds.add({ 'x': 1, 'y': 1 , 'properties': {'a':'b', 'c':1, 'd':0.23 }}));
var expected_describe = {
type: 'vector',
encoding: 'utf-8',
fields: {},
geometry_type: 'collection'
};
assert.deepEqual(expected_describe, ds.describe());
// Currently descriptors can not be added to memory datasource so will always be empty object
assert.deepEqual({},ds.fields());
});
it('should validate with raster', function() {
var options = {
type: 'gdal',
file: './test/data/images/sat_image.tif'
};
var ds = new mapnik.Datasource(options);
assert.ok(ds);
assert.deepEqual(ds.parameters(), options);
var describe = ds.describe();
var expected = { type: 'raster',
encoding: 'utf-8',
fields: { nodata: 'Number' },
geometry_type: 'raster'
};
assert.deepEqual(expected,describe);
// Test that if added to layer, can get datasource back
var layer = new mapnik.Layer('foo', '+init=epsg:4326');
layer.datasource = ds;
var ds2 = layer.datasource;
assert.ok(ds2);
assert.deepEqual(ds2.parameters(), options);
});
});<|fim▁end|> | |
<|file_name|>false.rs<|end_file_name|><|fim▁begin|>// rustfmt-empty_item_single_line: false
// Empty impl on single line
impl Lorem {
}<|fim▁hole|>impl Ipsum {
}
fn lorem() {
}
fn lorem() {
}<|fim▁end|> | |
<|file_name|>encoding_support.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
//! Parsing stylesheets from bytes (not `&str`).
extern crate encoding;
use context::QuirksMode;
use cssparser::{stylesheet_encoding, EncodingSupport};
use error_reporting::ParseErrorReporter;
use media_queries::MediaList;
use self::encoding::{EncodingRef, DecoderTrap};
use shared_lock::SharedRwLock;
use std::str;
use stylearc::Arc;
use stylesheets::{Stylesheet, StylesheetLoader, Origin, UrlExtraData};
struct RustEncoding;
impl EncodingSupport for RustEncoding {
type Encoding = EncodingRef;
fn utf8() -> Self::Encoding {
encoding::all::UTF_8
}
fn is_utf16_be_or_le(encoding: &Self::Encoding) -> bool {
matches!(encoding.name(), "utf-16be" | "utf-16le")
}
fn from_label(ascii_label: &[u8]) -> Option<Self::Encoding> {
str::from_utf8(ascii_label).ok().and_then(encoding::label::encoding_from_whatwg_label)
}
}
fn decode_stylesheet_bytes(css: &[u8], protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>)
-> (String, EncodingRef) {
let fallback_encoding = stylesheet_encoding::<RustEncoding>(
css, protocol_encoding_label.map(str::as_bytes), environment_encoding);
let (result, used_encoding) = encoding::decode(css, DecoderTrap::Replace, fallback_encoding);
(result.unwrap(), used_encoding)
}
impl Stylesheet {
/// Parse a stylesheet from a set of bytes, potentially received over the
/// network.
///
/// Takes care of decoding the network bytes and forwards the resulting
/// string to `Stylesheet::from_str`.
pub fn from_bytes(bytes: &[u8],
url_data: UrlExtraData,
protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>,
origin: Origin,
media: MediaList,
shared_lock: SharedRwLock,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &ParseErrorReporter,
quirks_mode: QuirksMode)
-> Stylesheet {
let (string, _) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Stylesheet::from_str(&string,
url_data,
origin,
Arc::new(shared_lock.wrap(media)),
shared_lock,
stylesheet_loader,
error_reporter,
quirks_mode,
0u64)
}
/// Updates an empty stylesheet with a set of bytes that reached over the
/// network.
pub fn update_from_bytes(existing: &Stylesheet,<|fim▁hole|> bytes: &[u8],
protocol_encoding_label: Option<&str>,
environment_encoding: Option<EncodingRef>,
url_data: UrlExtraData,
stylesheet_loader: Option<&StylesheetLoader>,
error_reporter: &ParseErrorReporter) {
let (string, _) = decode_stylesheet_bytes(
bytes, protocol_encoding_label, environment_encoding);
Self::update_from_str(existing,
&string,
url_data,
stylesheet_loader,
error_reporter,
0)
}
}<|fim▁end|> | |
<|file_name|>min_order_common.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Common design parameters for minimum order design methods
@author: Christian Muenker
"""
from __future__ import print_function, division, unicode_literals
#from importlib import import_module
<|fim▁hole|>class min_order_common(object):
def __init__(self):
self.name = {'common':'Common filter params'}
# message for min. filter order response types:
msg_min = ("Enter the maximum pass band ripple, minimum stop band "
"attenuation and the corresponding corner frequencies.")
# VISIBLE widgets for all man. / min. filter order response types:
vis_min = ['fo','fspecs','aspecs'] # minimum filter order
# ENABLED widgets for all man. / min. filter order response types:
enb_min = ['fo','fspecs','aspecs'] # minimum filter order
# common parameters for all man. / min. filter order response types:
par_min = ['f_S', 'A_PB', 'A_SB'] # enabled widget for min. filt. order
# Common data for all man. / min. filter order response types:
# This data is merged with the entries for individual response types
# (common data comes first):
self.com = {"min":{"enb":enb_min, "msg":msg_min, "par": par_min}}
self.rt = {
"LP": {"min":{"par":['f_S','A_PB','A_SB','F_PB','F_SB']}},
"HP": {"min":{"par":['f_S','A_PB','A_SB','F_SB','F_PB']}},
"BP": {"min":{"par":['f_S','A_PB','A_SB','A_SB2',
'F_SB','F_PB','F_PB2','F_SB2']}},
"BS": {"min":{"par":['f_S','A_PB','A_SB','A_PB2',
'F_PB','F_SB','F_SB2','F_PB2']}}
# "HIL": {"man":{"par":['F_SB', 'F_PB', 'F_PB2', 'F_SB2','A_SB','A_PB','A_SB2']}}
#"DIFF":
}<|fim▁end|> | #import filterbroker as fb
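# --- Illustrative sketch (not part of the original module) ---
# Shows how the specification defined above is typically read by a consumer
# (e.g. a filter tree builder). The consumer code below is an assumption for
# illustration; the dictionary keys and values are taken from the class itself.
#
#   spec = min_order_common()
#   common_min = spec.com["min"]            # widgets + message shared by all response types
#   lp_par = spec.rt["LP"]["min"]["par"]    # ['f_S', 'A_PB', 'A_SB', 'F_PB', 'F_SB']
#   print(common_min["msg"], lp_par)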
|
<|file_name|>DataProvider.java<|end_file_name|><|fim▁begin|>package by.bsuir.verkpavel.adb.data;
<|fim▁hole|>
import by.bsuir.verkpavel.adb.data.entity.Account;
import by.bsuir.verkpavel.adb.data.entity.Client;
import by.bsuir.verkpavel.adb.data.entity.Deposit;
import by.bsuir.verkpavel.adb.data.entity.TransactionsInfo;
//MAYBE Remove this facade and use separate class or add three getInstance methods
public class DataProvider {
private static DataProvider instance;
private Connection connection;
private ClientProvider clientProvider;
private DepositProvider depositProvider;
private AccountProvider accountProvider;
private static final String DB_PATH = "jdbc:mysql://localhost:3306/bank_users?useUnicode=true&characterEncoding=utf8";
private static final String DB_USER_NAME = "root";
private static final String DB_PASSWORD = "123456";
private DataProvider() {
try {
Class.forName("com.mysql.jdbc.Driver");
this.connection = DriverManager.getConnection(DB_PATH, DB_USER_NAME, DB_PASSWORD);
this.clientProvider = ClientProvider.getInstance(connection);
this.depositProvider = DepositProvider.getInstance(connection);
this.accountProvider = AccountProvider.getInstance(connection);
} catch (ClassNotFoundException e) {
System.out.println("DB driver not found");
e.printStackTrace();
} catch (SQLException e) {
e.printStackTrace();
}
};
public static DataProvider getInstance() {
if (instance == null) {
instance = new DataProvider();
}
return instance;
}
public ArrayList<String> getCityList() {
return clientProvider.getCityList();
}
public ArrayList<String> getFamilyStatuses() {
return clientProvider.getFamilyStatuses();
}
public ArrayList<String> getNationalitys() {
return clientProvider.getNationalitys();
}
public ArrayList<String> getDisabilitys() {
return clientProvider.getDisabilitys();
}
public String saveClient(Client client) {
return clientProvider.saveClient(client);
}
public String updateClient(Client client) {
return clientProvider.updateClient(client);
}
public ArrayList<Client> getAllClients() {
return clientProvider.getAllClients();
}
public void deleteClient(Client client) {
clientProvider.deleteClient(client);
}
public ArrayList<String> getUserFullNames() {
return clientProvider.getUserFullNames();
}
public ArrayList<String> getCurrency() {
return depositProvider.getCurrency();
}
public ArrayList<String> getDepositTypeList() {
return depositProvider.getDepositTypeList();
}
public String saveDeposit(Deposit deposit) {
return depositProvider.saveDeposit(deposit);
}
public ArrayList<Deposit> getAllDeposits() {
return depositProvider.getAllDeposits();
}
public void updateDepositEndDate(Deposit deposit, String newDate) {
depositProvider.updateDepositEndDate(deposit, newDate);
}
public ArrayList<Account> getAllAccounts() {
return accountProvider.getAllAccounts();
}
public void addTransaction(Account from, Account to, double sum, int currency) {
try {
accountProvider.addTransaction(from, to, sum, currency);
} catch (SQLException e) {
e.printStackTrace();
}
}
public void addMonoTransaction(Account from, Account to, double sum, int currency) {
try {
accountProvider.addMonoTransaction(from, to, sum, currency);
} catch (SQLException e) {
e.printStackTrace();
}
}
public Account[] getAccountByDeposit(Deposit deposit) {
return accountProvider.getAccountByDeposit(deposit);
}
public Account getCashBoxAccount() {
return accountProvider.getCashBoxAccount();
}
public void createAccountsByDeposit(Deposit deposit) {
accountProvider.createAccountByDeposit(deposit);
}
public Account getFDBAccount() {
return accountProvider.getFDBAccount();
}
public ArrayList<TransactionsInfo> getTransatcionsByAccount(Account account) {
return accountProvider.getTransactionByAccount(account);
}
public ArrayList<Deposit> getAllActiveDeposits() {
return depositProvider.getAllActiveDeposits();
}
public void disableDeposit(Deposit deposit) {
depositProvider.disableDeposit(deposit);
}
}<|fim▁end|> | import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.ArrayList; |
<|file_name|>Gruntfile.js<|end_file_name|><|fim▁begin|>module.exports = function(grunt) {
"use strict";
var jsFiles = ["Gruntfile.js", "src/**/*.js"];
var htmlfiles = ["src/view/todoList.html", "src/view/todoFooter.html", "src/view/todoCreater.html"];
grunt.initConfig({
jshint: {
all: jsFiles,
options: {
jshintrc: ".jshintrc",
jshintignore: ".jshintignore"
}
},
watch: {
css: {
options: {
livereload: true
},<|fim▁hole|>
},
js: {
options: {
livereload: true
},
files: ["src/**/*.js"]
},
html: {
options: {
livereload: true
},
files: ["src/**/*.html"],
tasks: ["template"]
}
},
jsbeautifier: {
js: {
src: jsFiles,
options: {
config: "jsbeautifier.json"
}
},
json: {
fileTypes: [".json"],
src: ["bower.json", "package.json", "jsbeautifier.json"],
options: {
config: "jsbeautifier.json"
}
}
}
});
grunt.loadNpmTasks("grunt-contrib-jshint");
grunt.loadNpmTasks("grunt-jsbeautifier");
grunt.loadNpmTasks("grunt-contrib-watch");
/*
grunt.loadTasks = "tasks";
require("matchdep").filterAll("grunt-*").forEach(grunt.loadNpmTasks);
*/
grunt.registerTask("template", "Converting HTML templates into JSON", function() {
var _ = require("underscore");
var src = "";
htmlfiles.forEach(function(file) {
var filetext = grunt.file.read(file).split("\t").join("")
.split("\n").join("")
.split(">").map(function(v) {
return v.trim();
}).join(">");
src = src + "templates[\"" + file.split("/").pop() + "\"] = " + _.template(filetext).source + ";\n";
});
grunt.file.write("src/template.js", "templates = {};" + src);
console.log("src/template.js Generated");
});
grunt.registerTask("default", ["jsbeautifier", "jshint"]);
};<|fim▁end|> | files: ["src/**/*.css"] |
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>module.exports = function(config) {
config.set({
basePath: './',
frameworks: ['systemjs', 'jasmine'],
systemjs: {
configFile: 'config.js',
config: {
paths: {
"*": null,
"src/*": "src/*",
"typescript": "node_modules/typescript/lib/typescript.js",
"systemjs": "node_modules/systemjs/dist/system.js",
'system-polyfills': 'node_modules/systemjs/dist/system-polyfills.js',
'es6-module-loader': 'node_modules/es6-module-loader/dist/es6-module-loader.js'
},
packages: {
'test/unit': {
defaultExtension: 'ts'
},
'src': {
defaultExtension: 'ts'
}
},
transpiler: 'typescript'
},
serveFiles: [
'src/**/*.ts',
'jspm_packages/**/*.js'
]
},
files: [
'test/unit/*.spec.ts'
],
exclude: [],
<|fim▁hole|> colors: true,
logLevel: config.LOG_INFO,
autoWatch: true,
browsers: ['Chrome'],
singleRun: false
});
};<|fim▁end|> | preprocessors: { },
reporters: ['progress'],
port: 9876,
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![allow(non_upper_case_globals)]
// External crates
#[macro_use]
extern crate clap;
extern crate num_cpus;
extern crate num;
extern crate time;
// External modules
use clap::App;
use num::complex::Complex64;
use time::{precise_time_ns};
// Rust modules
use std::fs::File;
use std::io::prelude::Write;
use std::io::Result;
use std::io::BufWriter;
use std::fs::OpenOptions;
use std::path::Path;
use std::fs;
// Configuration file, reflects command line options
#[derive(Copy, Clone)]
pub struct MandelConfig {
pub re1: f64,
pub re2: f64,
pub img1: f64,
pub img2: f64,
pub x_step: f64,
pub y_step: f64,
pub max_iter: u32,
pub img_size: u32,
pub write_metadata: bool,
pub no_ppm: bool,
pub num_threads: u32,
pub num_of_runs: u32
}
include!(concat!(env!("OUT_DIR"), "/compiler_version.rs"));
// Parse command line options via clap and returns the responding configuration
pub fn parse_arguments() -> MandelConfig {
let matches = App::new("mandel_rust")
.version("0.3")
.author("Willi Kappler <[email protected]>")
.about("Simple mandelbrot written in pure rust")
.args_from_usage(
"--re1=[REAL1] 'left real part (default: -2.0)'
--re2=[REAL2] 'right real part (default: 1.0)'
--img1=[IMAGINARY1] 'lower part (default: -1.50)'
--img2=[IMAGINARY2] 'upper part (default: 1.50)'
--write_metadata 'write metadata like run time into the ppm file (default: off)'
--no_ppm 'disable creation of the ppm file, just run the calculation (default: off)'
--bench 'use all available CPUs (default: off), will change in the future'
--max_iter=[MAX_ITER] 'maximum number of iterations (default: 4096)'
--img_size=[IMAGE_SIZE] 'size of image in pixel (square, default: 2048, must be a power of two)'
--num_of_runs=[NUM_OF_RUNS] 'number of repetitive runs (default: 2)'
--num_threads=[NUMBER_OF_THREADS] 'number of threads to use (default: 2)'")
.get_matches();
let re1 = value_t!(matches.value_of("REAL1"), f64).unwrap_or(-2.0);
let re2 = value_t!(matches.value_of("REAL2"), f64).unwrap_or(1.0);
let img1 = value_t!(matches.value_of("IMAGINARY1"), f64).unwrap_or(-1.5);
let img2 = value_t!(matches.value_of("IMAGINARY2"), f64).unwrap_or(1.5);
let metadata = matches.is_present("write_metadata");
let bench = matches.is_present("bench");
let no_ppm = matches.is_present("no_ppm");
let max_iter = value_t!(matches.value_of("MAX_ITER"), u32).unwrap_or(4096);
let img_size = value_t!(matches.value_of("IMAGE_SIZE"), u32).unwrap_or(2048);
let num_of_runs = value_t!(matches.value_of("NUM_OF_RUNS"), u32).unwrap_or(2);
let num_threads = if bench { num_cpus::get() as u32 } else {
value_t!(matches.value_of("NUMBER_OF_THREADS"), u32).unwrap_or(2) };
assert!(re1 < re2);
assert!(img1 < img2);
assert!(max_iter > 0);
assert!(img_size > 0);
assert!(num_threads > 0);
println!("Configuration: re1: {:.2}, re2: {:.2}, img1: {:.2}, img2: {:.2}, max_iter: {}, img_size: {}, num_threads: {}",
re1, re2, img1, img2, max_iter, img_size, num_threads);
let x_step = (re2 - re1) / (img_size as f64);
let y_step = (img2 - img1) / (img_size as f64);
MandelConfig{
re1: re1,
re2: re2,
img1: img1,
img2: img2,
x_step: x_step,
y_step: y_step,
max_iter: max_iter,
img_size: img_size,
write_metadata: metadata,
no_ppm: no_ppm,
num_threads: num_threads,
num_of_runs: num_of_runs
}
}
// The inner iteration loop of the mandelbrot calculation
// See https://en.wikipedia.org/wiki/Mandelbrot_set
pub fn mandel_iter(max_iter: u32, c: Complex64) -> u32 {
let mut z: Complex64 = c;
let mut iter = 0;
while (z.norm_sqr() <= 4.0) && (iter < max_iter) {
z = c + (z * z);
iter = iter + 1;
}
iter
}
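// --- Illustrative sketch (not part of the original file) ---
// Two quick checks of the escape-time behaviour implemented above: the origin
// never escapes and therefore uses the full iteration budget, while a point
// with |c| > 2 fails the norm check before the first iteration. The sample
// values and test names are assumptions for illustration only.
#[cfg(test)]
mod mandel_iter_tests {
    use super::mandel_iter;
    use num::complex::Complex64;

    #[test]
    fn inside_point_uses_full_iteration_budget() {
        assert_eq!(mandel_iter(256, Complex64::new(0.0, 0.0)), 256);
    }

    #[test]
    fn outside_point_escapes_immediately() {
        assert_eq!(mandel_iter(256, Complex64::new(2.0, 2.0)), 0);
    }
}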
// Write calculated mandelbrot set as PPM image.
// Add run time information as comment.
fn write_image(file_name: &str, mandel_config: &MandelConfig, time_in_ms: f64, image: &[u32]) -> Result<()> {
let mut buffer = BufWriter::new(try!(File::create(file_name)));
try!(buffer.write(b"P3\n"));
try!(write!(buffer, "# mandelbrot, max_iter: {}\n", mandel_config.max_iter));
if mandel_config.write_metadata {
// TODO: add more meta data: date and time, method, ...
try!(write!(buffer, "# computation time: {} ms\n", time_in_ms));
}
try!(write!(buffer, "{0} {0}\n", mandel_config.img_size));
try!(buffer.write(b"255\n"));
let mut img_value: u32;
for y in 0..mandel_config.img_size {
for x in 0..mandel_config.img_size {
img_value = image[((y * mandel_config.img_size) + x) as usize];
if img_value == mandel_config.max_iter {
try!(buffer.write(b"0 0 0 "));
} else {
try!(write!(buffer, "255 {} 0 ", (img_value % 16) * 16));
}
}
try!(buffer.write(b"\n"));
}
Ok(())
}
fn write_benchmark_result(method: &str, num_threads: u32,
time_in_ms: f64, min_time: f64, max_time: f64) -> Result<()> {
// Check if output folder "plot" is available:
if !Path::new("plot").exists() {
// If not, create it!
println!("Folder 'plot' does not exist, creating it...");<|fim▁hole|>
}
let mut buffer = BufWriter::new(try!(
OpenOptions::new()
.write(true)
.append(true)
.create(true)
.open(format!("plot{}{}.txt", std::path::MAIN_SEPARATOR, method))));
try!(write!(buffer, "{} {} {} {}\n", num_threads, time_in_ms, min_time, max_time));
Ok(())
}
// Prepares and runs one version of the mandelbrot set calculation.
pub fn do_run(method: &str, mandel_func: &Fn(&MandelConfig, &mut [u32]) -> (),
mandel_config: &MandelConfig, image: &mut [u32], time_now: &str) {
let mut repetitive_times = Vec::new();
let mut min_time = std::f64::MAX;
let mut max_time = 0.0;
for _ in 0..mandel_config.num_of_runs {
let start_time = precise_time_ns();
mandel_func(mandel_config, image);
let end_time = precise_time_ns();
let total_time_in_ms = ((end_time - start_time) as f64) / (1000.0 * 1000.0);
if total_time_in_ms > max_time {
max_time = total_time_in_ms;
}
if total_time_in_ms < min_time {
min_time = total_time_in_ms;
}
repetitive_times.push(total_time_in_ms);
}
let mean_time = repetitive_times.iter().fold(0.0, |sum, t| sum + t) /
(mandel_config.num_of_runs as f64);
println!("Time taken for this run ({}): {:.5} ms", method, mean_time);
write_benchmark_result(&method, mandel_config.num_threads, mean_time,
min_time, max_time).expect("I/O error while writing benchmark results");
if !mandel_config.no_ppm {
let file_name = format!("{}_{}.ppm", method, &time_now);
write_image(&file_name, &mandel_config, mean_time, &image).expect(
&format!("I/O error while writing image: '{}'", file_name));
}
}<|fim▁end|> | try!(fs::create_dir("plot")); |
<|file_name|>react-tests.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */
const { expect } = chai;
import React from './React';
import TestUtils from './TestUtils';
describe('React components', () => {
it('should find valid xpath in react component', () => {
const component = TestUtils.renderIntoDocument(<blink>hi</blink>);
expect(component).to.have.xpath('//blink');
});
it('should find valid xpath in react component twice', () => {
const component = TestUtils.renderIntoDocument(<blink>hi</blink>);
expect(component).to.have.xpath('//blink');
expect(component).to.have.xpath('//blink');
});
describe('when it does not find valid xpath in react component', () => {
it('should throw', () => {
const component = TestUtils.renderIntoDocument(<blink>hi</blink>);
expect(() => {
expect(component).to.have.xpath('//h1');
}).to.throw('to have xpath \'//h1\'');
});
it('should throw with outerHTML of the component', () => {
const component = TestUtils.renderIntoDocument(<blink>hi</blink>);
expect(() => {
expect(component).to.have.xpath('//h1');
}).to.throw('hi</blink>');
});<|fim▁hole|><|fim▁end|> | });
}); |
<|file_name|>drop_rows_by_keywords.py<|end_file_name|><|fim▁begin|># vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
def drop_rows_by_keywords(self, keywords_values_dict):
"""
Drop the rows whose metadata xml (in the column holding the xml string) matches every {"keyword":"value"} pair in the given dictionary (the pairs are AND-ed).
Ex: keywords_values_dict -> {"SOPInstanceUID":"1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685", "Manufacturer":"SIEMENS", "StudyDate":"20030315"}
Parameters
----------
:param keywords_values_dict: (dict(str, str)) dictionary of keywords and values from xml string in metadata
Examples
--------
>>> dicom_path = "../datasets/dicom_uncompressed"
>>> dicom = tc.dicom.import_dcm(dicom_path)
>>> dicom.metadata.count()
3
<skip>
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 0 <?xml version="1.0" encodin...
[1] 1 <?xml version="1.0" encodin...
[2] 2 <?xml version="1.0" encodin...
</skip>
#Part of xml string looks as below<|fim▁hole|> <DicomAttribute keyword="MediaStorageSOPInstanceUID" tag="00020003" vr="UI"><Value number="1">1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685</Value></DicomAttribute>
...
>>> keywords_values_dict = {"SOPInstanceUID":"1.3.6.1.4.1.14519.5.2.1.7308.2101.234736319276602547946349519685", "Manufacturer":"SIEMENS", "StudyDate":"20030315"}
>>> dicom.drop_rows_by_keywords(keywords_values_dict)
>>> dicom.metadata.count()
2
<skip>
#After drop_rows
>>> dicom.metadata.inspect(truncate=30)
[#] id metadata
=======================================
[0] 1 <?xml version="1.0" encodin...
[1] 2 <?xml version="1.0" encodin...
>>> dicom.pixeldata.inspect(truncate=30)
[#] id imagematrix
===========================================================
[0] 1 [[ 0. 0. 0. ..., 0. 0. 0.]
[ 0. 70. 85. ..., 215. 288. 337.]
[ 0. 63. 72. ..., 228. 269. 317.]
...,
[ 0. 42. 40. ..., 966. 919. 871.]
[ 0. 42. 33. ..., 988. 887. 860.]
[ 0. 46. 38. ..., 983. 876. 885.]]
[1] 2 [[ 0. 0. 0. ..., 0. 0. 0.]
[ 0. 111. 117. ..., 159. 148. 135.]
[ 0. 116. 111. ..., 152. 138. 139.]
...,
[ 0. 49. 18. ..., 1057. 965. 853.]
[ 0. 42. 20. ..., 1046. 973. 891.]
[ 0. 48. 26. ..., 1041. 969. 930.]]
</skip>
"""
if not isinstance(keywords_values_dict, dict):
raise TypeError("keywords_values_dict should be a type of dict, but found type as %" % type(keywords_values_dict))
for key, value in keywords_values_dict.iteritems():
if not isinstance(key, basestring) or not isinstance(value, basestring):
raise TypeError("both keyword and value should be of <type 'str'>")
#Scala dicom is always invoked, as python joins are expensive compared to serialization.
def f(scala_dicom):
scala_dicom.dropRowsByKeywords(self._tc.jutils.convert.to_scala_map(keywords_values_dict))
self._call_scala(f)<|fim▁end|> | <?xml version="1.0" encoding="UTF-8"?>
<NativeDicomModel xml:space="preserve">
<DicomAttribute keyword="FileMetaInformationVersion" tag="00020001" vr="OB"><InlineBinary>AAE=</InlineBinary></DicomAttribute>
<DicomAttribute keyword="MediaStorageSOPClassUID" tag="00020002" vr="UI"><Value number="1">1.2.840.10008.5.1.4.1.1.4</Value></DicomAttribute> |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|># pylint: disable=redefined-outer-name
import os
import pytest
from pylint import checkers
from pylint.lint import PyLinter
# pylint: disable=no-name-in-module
from pylint.testutils import MinimalTestReporter
@pytest.fixture
def linter(checker, register, enable, disable, reporter):
_linter = PyLinter()
_linter.set_reporter(reporter())
checkers.initialize(_linter)
if register:
register(_linter)
if checker:
_linter.register_checker(checker(_linter))
if disable:
for msg in disable:
_linter.disable(msg)
if enable:
for msg in enable:
_linter.enable(msg)
os.environ.pop('PYLINTRC', None)
return _linter<|fim▁hole|>
@pytest.fixture(scope='module')
def checker():
return None
@pytest.fixture(scope='module')
def register():
return None
@pytest.fixture(scope='module')
def enable():
return None
@pytest.fixture(scope='module')
def disable():
return None
@pytest.fixture(scope='module')
def reporter():
return MinimalTestReporter<|fim▁end|> | |
<|file_name|>interfaces.py<|end_file_name|><|fim▁begin|>import os
import time
from abc import abstractmethod, ABC
from typing import Dict, Tuple, List
from cereal import car
from common.kalman.simple_kalman import KF1D
from common.realtime import DT_CTRL
from selfdrive.car import gen_empty_fingerprint
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.drive_helpers import V_CRUISE_MAX
from selfdrive.controls.lib.events import Events
from selfdrive.controls.lib.vehicle_model import VehicleModel
GearShifter = car.CarState.GearShifter
EventName = car.CarEvent.EventName
MAX_CTRL_SPEED = (V_CRUISE_MAX + 4) * CV.KPH_TO_MS
ACCEL_MAX = 2.0
ACCEL_MIN = -3.5
# generic car and radar interfaces
<|fim▁hole|>
self.frame = 0
self.steering_unpressed = 0
self.low_speed_alert = False
self.silent_steer_warning = True
if CarState is not None:
self.CS = CarState(CP)
self.cp = self.CS.get_can_parser(CP)
self.cp_cam = self.CS.get_cam_can_parser(CP)
self.cp_body = self.CS.get_body_can_parser(CP)
self.cp_loopback = self.CS.get_loopback_can_parser(CP)
self.CC = None
if CarController is not None:
self.CC = CarController(self.cp.dbc_name, CP, self.VM)
@staticmethod
def get_pid_accel_limits(CP, current_speed, cruise_speed):
return ACCEL_MIN, ACCEL_MAX
@staticmethod
@abstractmethod
def get_params(candidate, fingerprint=gen_empty_fingerprint(), car_fw=None):
pass
@staticmethod
def init(CP, logcan, sendcan):
pass
@staticmethod
def get_steer_feedforward_default(desired_angle, v_ego):
# Proportional to realigning tire momentum: lateral acceleration.
# TODO: something with lateralPlan.curvatureRates
return desired_angle * (v_ego**2)
@classmethod
def get_steer_feedforward_function(cls):
return cls.get_steer_feedforward_default
# returns a set of default params to avoid repetition in car specific params
@staticmethod
def get_std_params(candidate, fingerprint):
ret = car.CarParams.new_message()
ret.carFingerprint = candidate
ret.unsafeMode = 0 # see panda/board/safety_declarations.h for allowed values
# standard ALC params
ret.steerControlType = car.CarParams.SteerControlType.torque
ret.steerMaxBP = [0.]
ret.steerMaxV = [1.]
ret.minSteerSpeed = 0.
ret.wheelSpeedFactor = 1.0
ret.pcmCruise = True # openpilot's state is tied to the PCM's cruise state on most cars
ret.minEnableSpeed = -1. # enable is done by stock ACC, so ignore this
ret.steerRatioRear = 0. # no rear steering, at least on the listed cars above
ret.openpilotLongitudinalControl = False
ret.stopAccel = -2.0
ret.stoppingDecelRate = 0.8 # brake_travel/s while trying to stop
ret.vEgoStopping = 0.5
ret.vEgoStarting = 0.5
ret.stoppingControl = True
ret.longitudinalTuning.deadzoneBP = [0.]
ret.longitudinalTuning.deadzoneV = [0.]
ret.longitudinalTuning.kf = 1.
ret.longitudinalTuning.kpBP = [0.]
ret.longitudinalTuning.kpV = [1.]
ret.longitudinalTuning.kiBP = [0.]
ret.longitudinalTuning.kiV = [1.]
# TODO estimate car specific lag, use .15s for now
ret.longitudinalActuatorDelayLowerBound = 0.15
ret.longitudinalActuatorDelayUpperBound = 0.15
ret.steerLimitTimer = 1.0
return ret
@abstractmethod
def update(self, c: car.CarControl, can_strings: List[bytes]) -> car.CarState:
pass
@abstractmethod
def apply(self, c: car.CarControl) -> Tuple[car.CarControl.Actuators, List[bytes]]:
pass
def create_common_events(self, cs_out, extra_gears=None, pcm_enable=True):
events = Events()
if cs_out.doorOpen:
events.add(EventName.doorOpen)
if cs_out.seatbeltUnlatched:
events.add(EventName.seatbeltNotLatched)
if cs_out.gearShifter != GearShifter.drive and (extra_gears is None or
cs_out.gearShifter not in extra_gears):
events.add(EventName.wrongGear)
if cs_out.gearShifter == GearShifter.reverse:
events.add(EventName.reverseGear)
if not cs_out.cruiseState.available:
events.add(EventName.wrongCarMode)
if cs_out.espDisabled:
events.add(EventName.espDisabled)
if cs_out.gasPressed:
events.add(EventName.gasPressed)
if cs_out.stockFcw:
events.add(EventName.stockFcw)
if cs_out.stockAeb:
events.add(EventName.stockAeb)
if cs_out.vEgo > MAX_CTRL_SPEED:
events.add(EventName.speedTooHigh)
if cs_out.cruiseState.nonAdaptive:
events.add(EventName.wrongCruiseMode)
if cs_out.brakeHoldActive and self.CP.openpilotLongitudinalControl:
events.add(EventName.brakeHold)
# Handle permanent and temporary steering faults
self.steering_unpressed = 0 if cs_out.steeringPressed else self.steering_unpressed + 1
if cs_out.steerFaultTemporary:
# if the user overrode recently, show a less harsh alert
if self.silent_steer_warning or cs_out.standstill or self.steering_unpressed < int(1.5 / DT_CTRL):
self.silent_steer_warning = True
events.add(EventName.steerTempUnavailableSilent)
else:
events.add(EventName.steerTempUnavailable)
else:
self.silent_steer_warning = False
if cs_out.steerFaultPermanent:
events.add(EventName.steerUnavailable)
# Disable on rising edge of gas or brake. Also disable on brake when speed > 0.
if (cs_out.gasPressed and not self.CS.out.gasPressed) or \
(cs_out.brakePressed and (not self.CS.out.brakePressed or not cs_out.standstill)):
events.add(EventName.pedalPressed)
# we engage when pcm is active (rising edge)
if pcm_enable:
if cs_out.cruiseState.enabled and not self.CS.out.cruiseState.enabled:
events.add(EventName.pcmEnable)
elif not cs_out.cruiseState.enabled:
events.add(EventName.pcmDisable)
return events
class RadarInterfaceBase(ABC):
def __init__(self, CP):
self.pts = {}
self.delay = 0
self.radar_ts = CP.radarTimeStep
self.no_radar_sleep = 'NO_RADAR_SLEEP' in os.environ
def update(self, can_strings):
ret = car.RadarData.new_message()
if not self.no_radar_sleep:
time.sleep(self.radar_ts) # radard runs on RI updates
return ret
class CarStateBase(ABC):
def __init__(self, CP):
self.CP = CP
self.car_fingerprint = CP.carFingerprint
self.out = car.CarState.new_message()
self.cruise_buttons = 0
self.left_blinker_cnt = 0
self.right_blinker_cnt = 0
self.left_blinker_prev = False
self.right_blinker_prev = False
# Q = np.matrix([[10.0, 0.0], [0.0, 100.0]])
# R = 1e3
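# K below is the precomputed steady-state Kalman gain corresponding to the Q and R noted above.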
self.v_ego_kf = KF1D(x0=[[0.0], [0.0]],
A=[[1.0, DT_CTRL], [0.0, 1.0]],
C=[1.0, 0.0],
K=[[0.12287673], [0.29666309]])
def update_speed_kf(self, v_ego_raw):
if abs(v_ego_raw - self.v_ego_kf.x[0][0]) > 2.0: # Prevent large accelerations when car starts at non zero speed
self.v_ego_kf.x = [[v_ego_raw], [0.0]]
v_ego_x = self.v_ego_kf.update(v_ego_raw)
return float(v_ego_x[0]), float(v_ego_x[1])
def get_wheel_speeds(self, fl, fr, rl, rr, unit=CV.KPH_TO_MS):
factor = unit * self.CP.wheelSpeedFactor
wheelSpeeds = car.CarState.WheelSpeeds.new_message()
wheelSpeeds.fl = fl * factor
wheelSpeeds.fr = fr * factor
wheelSpeeds.rl = rl * factor
wheelSpeeds.rr = rr * factor
return wheelSpeeds
def update_blinker_from_lamp(self, blinker_time: int, left_blinker_lamp: bool, right_blinker_lamp: bool):
"""Update blinkers from lights. Enable output when light was seen within the last `blinker_time`
iterations"""
# TODO: Handle case when switching direction. Now both blinkers can be on at the same time
self.left_blinker_cnt = blinker_time if left_blinker_lamp else max(self.left_blinker_cnt - 1, 0)
self.right_blinker_cnt = blinker_time if right_blinker_lamp else max(self.right_blinker_cnt - 1, 0)
return self.left_blinker_cnt > 0, self.right_blinker_cnt > 0
def update_blinker_from_stalk(self, blinker_time: int, left_blinker_stalk: bool, right_blinker_stalk: bool):
"""Update blinkers from stalk position. When stalk is seen the blinker will be on for at least blinker_time,
or until the stalk is turned off, whichever is longer. If the opposite stalk direction is seen the blinker
is forced to the other side. On a rising edge of the stalk the timeout is reset."""
if left_blinker_stalk:
self.right_blinker_cnt = 0
if not self.left_blinker_prev:
self.left_blinker_cnt = blinker_time
if right_blinker_stalk:
self.left_blinker_cnt = 0
if not self.right_blinker_prev:
self.right_blinker_cnt = blinker_time
self.left_blinker_cnt = max(self.left_blinker_cnt - 1, 0)
self.right_blinker_cnt = max(self.right_blinker_cnt - 1, 0)
self.left_blinker_prev = left_blinker_stalk
self.right_blinker_prev = right_blinker_stalk
return bool(left_blinker_stalk or self.left_blinker_cnt > 0), bool(right_blinker_stalk or self.right_blinker_cnt > 0)
@staticmethod
def parse_gear_shifter(gear: str) -> car.CarState.GearShifter:
d: Dict[str, car.CarState.GearShifter] = {
'P': GearShifter.park, 'R': GearShifter.reverse, 'N': GearShifter.neutral,
'E': GearShifter.eco, 'T': GearShifter.manumatic, 'D': GearShifter.drive,
'S': GearShifter.sport, 'L': GearShifter.low, 'B': GearShifter.brake
}
return d.get(gear, GearShifter.unknown)
@staticmethod
def get_cam_can_parser(CP):
return None
@staticmethod
def get_body_can_parser(CP):
return None
@staticmethod
def get_loopback_can_parser(CP):
return None<|fim▁end|> | class CarInterfaceBase(ABC):
def __init__(self, CP, CarController, CarState):
self.CP = CP
self.VM = VehicleModel(CP) |
<|file_name|>test_kinesis_cloudformation.py<|end_file_name|><|fim▁begin|>import boto3
import sure # noqa
from moto import mock_kinesis, mock_cloudformation
@mock_cloudformation
def test_kinesis_cloudformation_create_stream():
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
stack_name = "MyStack"
template = '{"Resources":{"MyStream":{"Type":"AWS::Kinesis::Stream"}}}'
cf_conn.create_stack(StackName=stack_name, TemplateBody=template)
provisioned_resource = cf_conn.list_stack_resources(StackName=stack_name)[
"StackResourceSummaries"
][0]
provisioned_resource["LogicalResourceId"].should.equal("MyStream")
len(provisioned_resource["PhysicalResourceId"]).should.be.greater_than(0)
@mock_cloudformation
@mock_kinesis
def test_kinesis_cloudformation_get_attr():
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
stack_name = "MyStack"
template = """
Resources:
TheStream:
Type: AWS::Kinesis::Stream
Outputs:
StreamName:
Value: !Ref TheStream
StreamArn:
Value: !GetAtt TheStream.Arn
""".strip()
cf_conn.create_stack(StackName=stack_name, TemplateBody=template)
stack_description = cf_conn.describe_stacks(StackName=stack_name)["Stacks"][0]
output_stream_name = [
output["OutputValue"]
for output in stack_description["Outputs"]
if output["OutputKey"] == "StreamName"
][0]
output_stream_arn = [
output["OutputValue"]
for output in stack_description["Outputs"]
if output["OutputKey"] == "StreamArn"
][0]
kinesis_conn = boto3.client("kinesis", region_name="us-east-1")
stream_description = kinesis_conn.describe_stream(StreamName=output_stream_name)[
"StreamDescription"
]
output_stream_arn.should.equal(stream_description["StreamARN"])
@mock_cloudformation
@mock_kinesis
def test_kinesis_cloudformation_update():
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
stack_name = "MyStack"
template = """
Resources:
TheStream:
Type: AWS::Kinesis::Stream
Properties:
Name: MyStream
ShardCount: 4
RetentionPeriodHours: 48
Tags:
- Key: TagKey1
Value: TagValue1
- Key: TagKey2
Value: TagValue2
""".strip()
cf_conn.create_stack(StackName=stack_name, TemplateBody=template)
stack_description = cf_conn.describe_stacks(StackName=stack_name)["Stacks"][0]
stack_description["StackName"].should.equal(stack_name)
kinesis_conn = boto3.client("kinesis", region_name="us-east-1")
stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[
"StreamDescription"
]
stream_description["RetentionPeriodHours"].should.equal(48)
tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"]
tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"]
tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"]
tag1_value.should.equal("TagValue1")
tag2_value.should.equal("TagValue2")
shards_provisioned = len(
[
shard
for shard in stream_description["Shards"]
if "EndingSequenceNumber" not in shard["SequenceNumberRange"]
]
)
shards_provisioned.should.equal(4)
template = """
Resources:
TheStream:
Type: AWS::Kinesis::Stream
Properties:
ShardCount: 6
RetentionPeriodHours: 24
Tags:
- Key: TagKey1
Value: TagValue1a
- Key: TagKey2
Value: TagValue2a
""".strip()
cf_conn.update_stack(StackName=stack_name, TemplateBody=template)
stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[
"StreamDescription"
]
stream_description["RetentionPeriodHours"].should.equal(24)
tags = kinesis_conn.list_tags_for_stream(StreamName="MyStream")["Tags"]
tag1_value = [tag for tag in tags if tag["Key"] == "TagKey1"][0]["Value"]
tag2_value = [tag for tag in tags if tag["Key"] == "TagKey2"][0]["Value"]
tag1_value.should.equal("TagValue1a")
tag2_value.should.equal("TagValue2a")
shards_provisioned = len(
[
shard
for shard in stream_description["Shards"]<|fim▁hole|> if "EndingSequenceNumber" not in shard["SequenceNumberRange"]
]
)
shards_provisioned.should.equal(6)
@mock_cloudformation
@mock_kinesis
def test_kinesis_cloudformation_delete():
cf_conn = boto3.client("cloudformation", region_name="us-east-1")
stack_name = "MyStack"
template = """
Resources:
TheStream:
Type: AWS::Kinesis::Stream
Properties:
Name: MyStream
""".strip()
cf_conn.create_stack(StackName=stack_name, TemplateBody=template)
stack_description = cf_conn.describe_stacks(StackName=stack_name)["Stacks"][0]
stack_description["StackName"].should.equal(stack_name)
kinesis_conn = boto3.client("kinesis", region_name="us-east-1")
stream_description = kinesis_conn.describe_stream(StreamName="MyStream")[
"StreamDescription"
]
stream_description["StreamName"].should.equal("MyStream")
cf_conn.delete_stack(StackName=stack_name)
streams = kinesis_conn.list_streams()["StreamNames"]
len(streams).should.equal(0)<|fim▁end|> | |
<|file_name|>integration.spec.ts<|end_file_name|><|fim▁begin|>// TODO: write a test that ensures that Quagga.decodeSingle returns a Promise when it should
// TODO: write a test that tests the multiple: true decoding option, allowing for multiple barcodes in
// a single image to be returned.
// TODO: write a test that allows for locate: false and locator configs to be tested.
import Quagga from '../../src/quagga';
import { QuaggaJSConfigObject } from '../../type-definitions/quagga';
import { expect } from 'chai';
import ExternalCode128Reader from '../../src/reader/code_128_reader';
// add it.allowFail see https://github.com/kellyselden/mocha-helpers/pull/4
// also see https://github.com/mochajs/mocha/issues/1480#issuecomment-487074628
if (typeof it.allowFail === 'undefined') {
it.allowFail = (title: string, callback: Function) => {
it(title, function() {
return Promise.resolve().then(() => {
return callback.apply(this, arguments);
}).catch((err) => {
console.trace('* error during test', err);
this.skip();
});
});
};
}
function runDecoderTest(name: string, config: QuaggaJSConfigObject, testSet: Array<{ name: string, result: string, format: string }>) {
describe(`Decoder ${name}`, () => {
testSet.forEach((sample) => {
it.allowFail(`decodes ${sample.name}`, async function() {
this.timeout(20000); // need to set a long timeout because laptops sometimes lag like hell in tests when they go low power
const thisConfig = {
...config,
src: `${typeof window !== 'undefined' ? '/' : ''}test/fixtures/${name}/${sample.name}`,
};
const result = await Quagga.decodeSingle(thisConfig);
// // console.warn(`* Expect result ${JSON.stringify(result)} to be an object`);
expect(result).to.be.an('Object');
expect(result.codeResult).to.be.an('Object');
expect(result.codeResult.code).to.equal(sample.result);
expect(result.codeResult.format).to.equal(sample.format);
expect(Quagga.canvas).to.be.an('Object');
expect(Quagga.canvas.dom).to.be.an('Object');
expect(Quagga.canvas.ctx).to.be.an('Object');
});
});
});
}
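// Builds a per-test Quagga config by merging the given overrides into the defaults
// (640px input, medium patches with half-sampling, ean_reader, numOfWorkers: 0).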
function generateConfig(configOverride: QuaggaJSConfigObject = {}) {
const config: QuaggaJSConfigObject = {
inputStream: {
size: 640,
...configOverride.inputStream,
},
locator: {
patchSize: 'medium',
halfSample: true,
...configOverride.locator,
},
numOfWorkers: 0,
decoder: {
readers: ['ean_reader'],
...configOverride.decoder,
},
locate: configOverride.locate,<|fim▁hole|> src: null,
};
return config;
}
describe('End-To-End Decoder Tests with Quagga.decodeSingle', () => {
runDecoderTest('ean', generateConfig(), [
{ 'name': 'image-001.jpg', 'result': '3574660239843', format: 'ean_13' },
{ 'name': 'image-002.jpg', 'result': '8032754490297', format: 'ean_13' },
{ 'name': 'image-004.jpg', 'result': '9002233139084', format: 'ean_13' },
{ 'name': 'image-003.jpg', 'result': '4006209700068', format: 'ean_13' },
{ 'name': 'image-005.jpg', 'result': '8004030044005', format: 'ean_13' },
{ 'name': 'image-006.jpg', 'result': '4003626011159', format: 'ean_13' },
{ 'name': 'image-007.jpg', 'result': '2111220009686', format: 'ean_13' },
{ 'name': 'image-008.jpg', 'result': '9000275609022', format: 'ean_13' },
{ 'name': 'image-009.jpg', 'result': '9004593978587', format: 'ean_13' },
{ 'name': 'image-010.jpg', 'result': '9002244845578', format: 'ean_13' },
]);
// TODO: note that the FORMAT reported from a supplement equals the parent. What exactly is the
// difference between a supplement and a separate reader? is it just semantic?
runDecoderTest('ean_extended', generateConfig({
inputStream: {
size: 800,
singleChannel: false,
},
decoder: {
readers: [{
format: 'ean_reader',
config: {
supplements: [
'ean_5_reader',
'ean_2_reader',
],
},
}],
},
}), [
{ 'name': 'image-001.jpg', 'result': '900437801102701', format: 'ean_13' },
{ 'name': 'image-002.jpg', 'result': '419871600890101', format: 'ean_13' },
{ 'name': 'image-003.jpg', 'result': '419871600890101', format: 'ean_13' },
{ 'name': 'image-004.jpg', 'result': '978054466825652495', format: 'ean_13' },
{ 'name': 'image-005.jpg', 'result': '419664190890712', format: 'ean_13' },
{ 'name': 'image-006.jpg', 'result': '412056690699101', format: 'ean_13' },
{ 'name': 'image-007.jpg', 'result': '419204531290601', format: 'ean_13' },
{ 'name': 'image-008.jpg', 'result': '419871600890101', format: 'ean_13' },
{ 'name': 'image-009.jpg', 'result': '978054466825652495', format: 'ean_13' },
{ 'name': 'image-010.jpg', 'result': '900437801102701', format: 'ean_13' },
]);
runDecoderTest('code_128', {
inputStream: {
size: 800,
singleChannel: false,
}
}, [
{ 'name': 'image-001.jpg', 'result': '0001285112001000040801', format: 'code_128' },
{ 'name': 'image-002.jpg', 'result': 'FANAVF14617104', format: 'code_128' },
{ 'name': 'image-003.jpg', 'result': '673023', format: 'code_128' },
{ 'name': 'image-004.jpg', 'result': '010210150301625334', format: 'code_128' },
{ 'name': 'image-005.jpg', 'result': '419055603900009001012999', format: 'code_128' },
{ 'name': 'image-006.jpg', 'result': '419055603900009001012999', format: 'code_128' },
{ 'name': 'image-007.jpg', 'result': '420957479499907123456123456781', format: 'code_128' },
{ 'name': 'image-008.jpg', 'result': '1020185021797280784055', format: 'code_128' },
{ 'name': 'image-009.jpg', 'result': '0001285112001000040801', format: 'code_128' },
{ 'name': 'image-010.jpg', 'result': '673023', format: 'code_128' },
// TODO: need to implement having different inputStream parameters to be able to
// read this one -- it works only with inputStream size set to 1600 presently, but
// other samples break at that high a size.
// { name: 'image-011.png', result: '33c64780-a9c0-e92a-820c-fae7011c11e2' },
]);
runDecoderTest(
'code_39',
generateConfig({
decoder: {
readers: ['code_39_reader'],
}
}), [
{ 'name': 'image-001.jpg', 'result': 'B3% $DAD$', format: 'code_39' },
{ 'name': 'image-003.jpg', 'result': 'CODE39', format: 'code_39' },
{ 'name': 'image-004.jpg', 'result': 'QUAGGAJS', format: 'code_39' },
{ 'name': 'image-005.jpg', 'result': 'CODE39', format: 'code_39' },
{ 'name': 'image-006.jpg', 'result': '2/4-8/16-32', format: 'code_39' },
{ 'name': 'image-007.jpg', 'result': '2/4-8/16-32', format: 'code_39' },
{ 'name': 'image-008.jpg', 'result': 'CODE39', format: 'code_39' },
{ 'name': 'image-009.jpg', 'result': '2/4-8/16-32', format: 'code_39' },
// TODO: image 10 in this set appears to be dependent upon #191
{ 'name': 'image-010.jpg', 'result': 'CODE39', format: 'code_39' },
{ 'name': 'image-011.jpg', 'result': '4', format: 'code_39' },
]);
runDecoderTest(
'code_39_vin',
generateConfig({
inputStream: {
size: 1280,
sequence: false,
},
locator: {
halfSample: false,
},
decoder: {
readers: ['code_39_vin_reader'],
},
}),
[
{ name: 'image-001.jpg', result: '2HGFG1B86BH501831', format: 'code_39_vin' },
{ name: 'image-002.jpg', result: 'JTDKB20U887718156', format: 'code_39_vin' },
// image-003 only works on the second run of a decode of it and only in browser?! wtf?
{ name: 'image-003.jpg', result: 'JM1BK32G071773697', format: 'code_39_vin' },
{ name: 'image-004.jpg', result: 'WDBTK75G94T028954', format: 'code_39_vin' },
{ name: 'image-005.jpg', result: '3VW2K7AJ9EM381173', format: 'code_39_vin' },
{ name: 'image-006.jpg', result: 'JM1BL1H4XA1335663', format: 'code_39_vin' },
{ name: 'image-007.jpg', result: 'JHMGE8H42AS021233', format: 'code_39_vin' },
{ name: 'image-008.jpg', result: 'WMEEJ3BA4DK652562', format: 'code_39_vin' },
{ name: 'image-009.jpg', result: 'WMEEJ3BA4DK652562', format: 'code_39_vin' }, //yes, 8 and 9 are same barcodes, different images slightly
{ name: 'image-010.jpg', result: 'WMEEJ3BA4DK652562', format: 'code_39_vin' }, // 10 also
{ name: 'image-011.jpg', result: '5FNRL38488B411196', format: 'code_39_vin' },
]
);
runDecoderTest(
'code_32',
generateConfig({
inputStream: {
size: 1280,
},
locator: {
patchSize: 'large',
halfSample: true,
},
numOfWorkers: 4,
decoder: {
readers: ['code_32_reader']
}
}),
[
{ name: 'image-1.jpg', result: 'A123456788', format: 'code_32_reader' },
{ name: 'image-2.jpg', result: 'A931028462', format: 'code_32_reader' },
{ name: 'image-3.jpg', result: 'A931028462', format: 'code_32_reader' },
{ name: 'image-4.jpg', result: 'A935776043', format: 'code_32_reader' },
{ name: 'image-5.jpg', result: 'A935776043', format: 'code_32_reader' },
{ name: 'image-6.jpg', result: 'A012745182', format: 'code_32_reader' },
{ name: 'image-7.jpg', result: 'A029651039', format: 'code_32_reader' },
{ name: 'image-8.jpg', result: 'A029651039', format: 'code_32_reader' },
{ name: 'image-9.jpg', result: 'A015896018', format: 'code_32_reader' },
{ name: 'image-10.jpg', result: 'A015896018', format: 'code_32_reader' },
]
);
runDecoderTest(
'ean_8',
generateConfig({ decoder: { readers: ['ean_8_reader'] } }),
[
{ 'name': 'image-001.jpg', 'result': '42191605', format: 'ean_8' },
{ 'name': 'image-002.jpg', 'result': '42191605', format: 'ean_8' },
{ 'name': 'image-003.jpg', 'result': '90311208', format: 'ean_8' },
// TODO: image-004 fails in browser, this is new to running in cypress vs PhantomJS. It does not fail in node. Likely similar problem to #190
{ 'name': 'image-004.jpg', 'result': '24057257', format: 'ean_8' },
// {"name": "image-005.jpg", "result": "90162602"},
{ 'name': 'image-006.jpg', 'result': '24036153', format: 'ean_8' },
// {"name": "image-007.jpg", "result": "42176817"},
{ 'name': 'image-008.jpg', 'result': '42191605', format: 'ean_8' },
{ 'name': 'image-009.jpg', 'result': '42242215', format: 'ean_8' },
{ 'name': 'image-010.jpg', 'result': '42184799', format: 'ean_8' },
]
);
runDecoderTest(
'upc',
generateConfig({ decoder: { readers: ['upc_reader'] } }),
[
{ 'name': 'image-001.jpg', 'result': '882428015268', format: 'upc_a' },
{ 'name': 'image-002.jpg', 'result': '882428015268', format: 'upc_a' },
{ 'name': 'image-003.jpg', 'result': '882428015084', format: 'upc_a' },
{ 'name': 'image-004.jpg', 'result': '882428015343', format: 'upc_a' },
{ 'name': 'image-005.jpg', 'result': '882428015343', format: 'upc_a' },
{ 'name': 'image-006.jpg', 'result': '882428015046', format: 'upc_a' },
{ 'name': 'image-007.jpg', 'result': '882428015084', format: 'upc_a' },
{ 'name': 'image-008.jpg', 'result': '882428015046', format: 'upc_a' },
{ 'name': 'image-009.jpg', 'result': '039047013551', format: 'upc_a' },
{ 'name': 'image-010.jpg', 'result': '039047013551', format: 'upc_a' },
]
);
runDecoderTest(
'upc_e',
generateConfig({ decoder: { readers: ['upc_e_reader'] } }),
[
{ 'name': 'image-001.jpg', 'result': '04965802', format: 'upc_e' },
{ 'name': 'image-002.jpg', 'result': '04965802', format: 'upc_e' },
{ 'name': 'image-003.jpg', 'result': '03897425', format: 'upc_e' },
{ 'name': 'image-004.jpg', 'result': '05096893', format: 'upc_e' },
{ 'name': 'image-005.jpg', 'result': '05096893', format: 'upc_e' },
{ 'name': 'image-006.jpg', 'result': '05096893', format: 'upc_e' },
{ 'name': 'image-007.jpg', 'result': '03897425', format: 'upc_e' },
{ 'name': 'image-008.jpg', 'result': '01264904', format: 'upc_e' },
{ 'name': 'image-009.jpg', 'result': '01264904', format: 'upc_e' },
{ 'name': 'image-010.jpg', 'result': '01264904', format: 'upc_e' },
]
);
runDecoderTest(
'codabar',
generateConfig({ decoder: { readers: ['codabar_reader'] } }),
[
{ 'name': 'image-001.jpg', 'result': 'A10/53+17-70D', format: 'codabar' },
{ 'name': 'image-002.jpg', 'result': 'B546745735B', format: 'codabar' },
{ 'name': 'image-003.jpg', 'result': 'C$399.95A', format: 'codabar' },
{ 'name': 'image-004.jpg', 'result': 'B546745735B', format: 'codabar' },
{ 'name': 'image-005.jpg', 'result': 'C$399.95A', format: 'codabar' },
{ 'name': 'image-006.jpg', 'result': 'B546745735B', format: 'codabar' },
{ 'name': 'image-007.jpg', 'result': 'C$399.95A', format: 'codabar' },
{ 'name': 'image-008.jpg', 'result': 'A16:9/4:3/3:2D', format: 'codabar' },
{ 'name': 'image-009.jpg', 'result': 'C$399.95A', format: 'codabar' },
{ 'name': 'image-010.jpg', 'result': 'C$399.95A', format: 'codabar' },
]
);
runDecoderTest(
'i2of5',
generateConfig({
inputStream: { size: 800, singleChannel: false },
locator: {
patchSize: 'small',
halfSample: false,
},
decoder: {
readers: ['i2of5_reader'],
},
}),
[
{ 'name': 'image-001.jpg', 'result': '2167361334', format: 'i2of5' },
{ 'name': 'image-002.jpg', 'result': '2167361334', format: 'i2of5' },
{ 'name': 'image-003.jpg', 'result': '2167361334', format: 'i2of5' },
{ 'name': 'image-004.jpg', 'result': '2167361334', format: 'i2of5' },
{ 'name': 'image-005.jpg', 'result': '2167361334', format: 'i2of5' },
]
);
runDecoderTest(
'2of5',
generateConfig({
inputStream: { size: 800, singleChannel: false },
decoder: {
readers: ['2of5_reader'],
},
}),
[
{ 'name': 'image-001.jpg', 'result': '9577149002', format: '2of5' },
{ 'name': 'image-002.jpg', 'result': '9577149002', format: '2of5' },
{ 'name': 'image-003.jpg', 'result': '5776158811', format: '2of5' },
{ 'name': 'image-004.jpg', 'result': '0463381455', format: '2of5' },
{ 'name': 'image-005.jpg', 'result': '3261594101', format: '2of5' },
{ 'name': 'image-006.jpg', 'result': '3261594101', format: '2of5' },
{ 'name': 'image-007.jpg', 'result': '3261594101', format: '2of5' },
{ 'name': 'image-008.jpg', 'result': '6730705801', format: '2of5' },
{ 'name': 'image-009.jpg', 'result': '5776158811', format: '2of5' },
{ 'name': 'image-010.jpg', 'result': '5776158811', format: '2of5' },
]
);
runDecoderTest(
'code_93',
generateConfig({
inputStream: { size: 800, singleChannel: false },
locator: {
patchSize: 'large',
halfSample: true,
},
decoder: {
readers: ['code_93_reader'],
},
}),
[
{ 'name': 'image-001.jpg', 'result': 'WIWV8ETQZ1', format: 'code_93' },
{ 'name': 'image-002.jpg', 'result': 'EH3C-%GU23RK3', format: 'code_93' },
{ 'name': 'image-003.jpg', 'result': 'O308SIHQOXN5SA/PJ', format: 'code_93' },
{ 'name': 'image-004.jpg', 'result': 'DG7Q$TV8JQ/EN', format: 'code_93' },
{ 'name': 'image-005.jpg', 'result': 'DG7Q$TV8JQ/EN', format: 'code_93' },
{ 'name': 'image-006.jpg', 'result': 'O308SIHQOXN5SA/PJ', format: 'code_93' },
{ 'name': 'image-007.jpg', 'result': 'VOFD1DB5A.1F6QU', format: 'code_93' },
{ 'name': 'image-008.jpg', 'result': 'WIWV8ETQZ1', format: 'code_93' },
{ 'name': 'image-009.jpg', 'result': '4SO64P4X8 U4YUU1T-', format: 'code_93' },
{ 'name': 'image-010.jpg', 'result': '4SO64P4X8 U4YUU1T-', format: 'code_93' },
]
);
});
describe('Parallel decoding works', () => {
it('decodeSingle running in parallel', async () => {
// TODO: we should throw in some other formats here too.
const testSet = [
{ 'name': 'image-001.jpg', 'result': '3574660239843', format: 'ean_13' },
{ 'name': 'image-002.jpg', 'result': '8032754490297', format: 'ean_13' },
{ 'name': 'image-004.jpg', 'result': '9002233139084', format: 'ean_13' },
{ 'name': 'image-003.jpg', 'result': '4006209700068', format: 'ean_13' },
{ 'name': 'image-005.jpg', 'result': '8004030044005', format: 'ean_13' },
{ 'name': 'image-006.jpg', 'result': '4003626011159', format: 'ean_13' },
{ 'name': 'image-007.jpg', 'result': '2111220009686', format: 'ean_13' },
{ 'name': 'image-008.jpg', 'result': '9000275609022', format: 'ean_13' },
{ 'name': 'image-009.jpg', 'result': '9004593978587', format: 'ean_13' },
{ 'name': 'image-010.jpg', 'result': '9002244845578', format: 'ean_13' },
];
const promises: Array<Promise<any>> = [];
testSet.forEach(sample => {
const config = generateConfig();
config.src = `${typeof window !== 'undefined' ? '/' : ''}test/fixtures/ean/${sample.name}`;
promises.push(Quagga.decodeSingle(config));
});
const results = await Promise.all(promises).catch((err) => { console.warn('* error decoding simultaneously', err); throw(err); });
const testResults = testSet.map(x => x.result);
results.forEach((r, index) => {
expect(r).to.be.an('object');
expect(r.codeResult).to.be.an('object');
expect(r.codeResult.code).to.equal(testResults[index]);
});
});
});
describe('External Reader Test, using stock code_128 reader', () => {
describe('works', () => {
before(() => {
Quagga.registerReader('external_code_128_reader', ExternalCode128Reader);
});
runDecoderTest(
'code_128',
generateConfig({
inputStream: {
size: 800,
singleChannel: false,
},
decoder: {
readers: ['external_code_128_reader'],
},
}),
[
{ 'name': 'image-001.jpg', 'result': '0001285112001000040801', format: 'code_128' },
{ 'name': 'image-002.jpg', 'result': 'FANAVF14617104', format: 'code_128' },
{ 'name': 'image-003.jpg', 'result': '673023', format: 'code_128' },
{ 'name': 'image-004.jpg', 'result': '010210150301625334', format: 'code_128' },
{ 'name': 'image-005.jpg', 'result': '419055603900009001012999', format: 'code_128' },
{ 'name': 'image-006.jpg', 'result': '419055603900009001012999', format: 'code_128' },
{ 'name': 'image-007.jpg', 'result': '420957479499907123456123456781', format: 'code_128' },
{ 'name': 'image-008.jpg', 'result': '1020185021797280784055', format: 'code_128' },
{ 'name': 'image-009.jpg', 'result': '0001285112001000040801', format: 'code_128' },
{ 'name': 'image-010.jpg', 'result': '673023', format: 'code_128' },
// TODO: need to implement having different inputStream parameters to be able to
// read this one -- it works only with inputStream size set to 1600 presently, but
// other samples break at that high a size.
// { name: 'image-011.png', result: '33c64780-a9c0-e92a-820c-fae7011c11e2' },
]
);
});
});<|fim▁end|> | |
<|file_name|>commands.rs<|end_file_name|><|fim▁begin|>use river::River;
pub use river::PeekResult;
/// Push command - stateless
///
/// Used to push messages to rivers like this:
///
/// ```
/// john::PushCommand::new().execute("river_name", "message");
/// ```
pub struct PushCommand;
impl PushCommand {
/// Constructor ::new()
///
/// Creates new instance of PushCommand
pub fn new() -> PushCommand {
PushCommand
}
/// Used to execute push command, specifying a river name and message
/// This can be called multiple times with different arguments
/// since PushCommand is stateless
pub fn execute(&self, river: &str, message: &str) {
River::new(river).push(message);
}
}
/// Peek command - stateless
///
/// Used to peek messages from rivers like this:
///
/// ```
/// // read latest message from river
/// john::PushCommand::new().execute("river name", "a message");
/// john::PushCommand::new().execute("river name", "a message 1");
/// john::PushCommand::new().execute("river name", "a message 2");
/// john::PushCommand::new().execute("river name", "a message 3");
/// john::PeekCommand::new().execute("river name", None);
///
/// // read message from river at specific offset
/// john::PeekCommand::new().execute("river name", Some(2));
/// ```
///
/// It returns Option < PeekResult >. When it was able to peek a message, the result will contain
/// the peeked message and the new offset to pass to the peek command (if you want the next message)
pub struct PeekCommand;
impl PeekCommand {
/// Constructor ::new()
///
/// Creates new instance of PeekCommand
pub fn new() -> PeekCommand {
PeekCommand
}
<|fim▁hole|> River::new(river).peek_at(offset)
}
}
/// Clear command - stateless
///
/// Used to clear messages from rivers like this:
///
/// ```
/// john::ClearCommand::new().execute("river_name");
/// ```
pub struct ClearCommand;
impl ClearCommand {
/// Constructor ::new()
///
/// Creates new instance of ClearCommand
pub fn new() -> ClearCommand {
ClearCommand
}
/// Used to execute clear command, specifying a river name
/// This can be called multiple times with different arguments
/// since ClearCommand is stateless
pub fn execute(&self, river: &str) {
River::new(river).destroy();
}
}<|fim▁end|> | /// Used to execute peek command, specifying a river name and optionally offset to peek at
pub fn execute(&self, river: &str, offset: Option < uint >) -> Option < PeekResult > { |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from pyramid.security import (
_get_authentication_policy
)
def my_get_authentication_policy(request):
# CRITICAL
# _get_authentication_policy(request)
# this method will return the instantiated singleton object that handles
# policy in pyramid app<|fim▁hole|> return _get_authentication_policy(request)<|fim▁end|> | # the policy object store keys from conf for generate token |
<|file_name|>CImageColorDef.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|>
#include <cstring>
bool
CImageColorDef::
getRGB(const std::string &name, double *r, double *g, double *b)
{
int ri, gi, bi;
if (! getRGBI(name, &ri, &gi, &bi))
return false;
double rgb_scale = 1.0/255.0;
*r = ri*rgb_scale;
*g = gi*rgb_scale;
*b = bi*rgb_scale;
return true;
}
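// Look up the named colour (lower-cased first) in the static colour table and
// return its 8-bit RGB components.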
bool
CImageColorDef::
getRGBI(const std::string &name, int *r, int *g, int *b)
{
int i;
std::string lname = CStrUtil::toLower(name);
const char *name1 = lname.c_str();
for (i = 0; color_def_data[i].name != 0; ++i)
if (strcmp(color_def_data[i].name, name1) == 0)
break;
if (color_def_data[i].name == 0)
return false;
*r = color_def_data[i].r;
*g = color_def_data[i].g;
*b = color_def_data[i].b;
return true;
}<|fim▁end|> | #include <CImageLibI.h>
#include <CImageColorDefP.h> |
<|file_name|>parameter_server_strategy.py<|end_file_name|><|fim▁begin|># Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes implementing a multi-worker ps DistributionStrategy."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
from tensorflow.python.distribute import cross_device_ops as cross_device_ops_lib
from tensorflow.python.distribute import device_util
from tensorflow.python.distribute import distribute_lib
from tensorflow.python.distribute import input_lib
from tensorflow.python.distribute import mirrored_strategy
from tensorflow.python.distribute import multi_worker_util
from tensorflow.python.distribute import numpy_dataset
from tensorflow.python.distribute import values
from tensorflow.python.distribute.cluster_resolver import SimpleClusterResolver
from tensorflow.python.distribute.cluster_resolver import TFConfigClusterResolver
from tensorflow.python.eager import context
from tensorflow.python.framework import device as tf_device
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import device_setter
from tensorflow.python.util import nest
from tensorflow.python.util.tf_export import tf_export
_LOCAL_CPU = "/device:CPU:0"
_LOCAL_GPU_0 = "/device:GPU:0"
# TODO(yuefengz): maybe cache variables on local CPU.
@tf_export("distribute.experimental.ParameterServerStrategy")
class ParameterServerStrategy(distribute_lib.DistributionStrategy):
"""A parameter server DistributionStrategy.
This strategy class works for both local training and between-graph replicated
training for multiple workers. It uses `TFConfigClusterResolver` to detect
configurations for multi-worker training. In multi-worker training mode, i.e.
`TFConfigClusterResolver` has detected 'TF_CONFIG' environment variable and
'TF_CONFIG' has a cluster spec, variables and updates to those variables are
assigned to parameter servers and other operations are assigned to workers.
In local training mode, variables are assigned to local CPU or the only GPU.
When each worker has more than one GPU, operations will be replicated on these
GPUs. In both cases, operations are replicated but variables are not and these
workers share a common view for which parameter server a variable is assigned
to.
This class assumes between-graph replication will be used and works on a graph
for a particular worker. Note that each graph and worker is independent.
This means that while each worker will synchronously compute a single gradient
update across all GPUs, updates between workers proceed asynchronously.
Operations that occur only on the first replica (such as incrementing the
global step), will occur on the first replica *of every worker*.
It is expected to call `call_for_each_replica(fn, ...)` for any
operations which potentially can be replicated across replicas (i.e. multiple
GPUs) even if there is only CPU or one GPU. When defining the `fn`, extra
caution needs to be taken:
1) It is generally not recommended to open a device scope under the strategy's
scope. A device scope (i.e. calling `tf.device`) will be merged with or
override the device for operations but will not change the device for
variables.
2) It is also not recommended to open a colocation scope (i.e. calling
`tf.colocate_with`) under the strategy's scope. For colocating variables, use
`strategy.extended.colocate_vars_with` instead. Colocation of ops will
possibly create conflicts of device assignment.
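A minimal usage sketch (illustrative only; method names follow the
`tf.distribute` API available in this TF version):

  strategy = tf.distribute.experimental.ParameterServerStrategy()
  with strategy.scope():
    v = tf.Variable(1.0)  # assigned to a parameter server (or local CPU/GPU)
  # replicated computation is then dispatched through
  # `call_for_each_replica(fn, ...)` as described above.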
"""
def __init__(self):
"""Initializes this strategy with default TFConfigClusterResolver."""
super(ParameterServerStrategy, self).__init__(
ParameterServerStrategyExtended(self))
class ParameterServerStrategyExtended(
distribute_lib.DistributionStrategyExtended):
"""Implementation of ParameterServerStrategy."""
def __init__(self,
container_strategy,
cluster_resolver=TFConfigClusterResolver()):
super(ParameterServerStrategyExtended, self).__init__(container_strategy)
self._initialize_strategy(cluster_resolver)
# We typically don't need to do all-reduce in this strategy.
self._cross_device_ops = (
cross_device_ops_lib.ReductionToOneDevice(reduce_to_device=_LOCAL_CPU))
def _initialize_strategy(self, cluster_resolver):
if cluster_resolver.cluster_spec().as_dict():
self._initialize_multi_worker(cluster_resolver)
else:
self._initialize_local(cluster_resolver)
def _initialize_multi_worker(self, cluster_resolver):
"""Initialize devices for multiple workers.
It creates variable devices and compute devices. Variables and operations
will be assigned to them respectively. We have one compute device per
replica. The variable device is a device function or device string. The
default variable device assigns variables to parameter servers in a
round-robin fashion.
Args:
cluster_resolver: a descendant of `ClusterResolver` object.
Raises:
ValueError: if the cluster doesn't have ps jobs.
"""
# TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
# some cases.
if isinstance(cluster_resolver, TFConfigClusterResolver):
num_gpus = context.num_gpus()
else:
num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
# Save the num_gpus_per_worker for configure method.
self._num_gpus_per_worker = num_gpus
cluster_spec = cluster_resolver.cluster_spec()
task_type = cluster_resolver.task_type
task_id = cluster_resolver.task_id
if not task_type or task_id is None:
raise ValueError("When `cluster_spec` is given, you must also specify "
"`task_type` and `task_id`")
cluster_spec = multi_worker_util.normalize_cluster_spec(cluster_spec)
assert cluster_spec.as_dict()
worker_device = "/job:%s/task:%d" % (task_type, task_id)
self._input_host_device = numpy_dataset.SingleDevice(worker_device)
# Define compute devices which is a list of device strings and one for each
# replica. When there are GPUs, replicate operations on these GPUs.
# Otherwise, place operations on CPU.
if num_gpus > 0:
compute_devices = tuple(
"%s/device:GPU:%d" % (worker_device, i) for i in range(num_gpus))
else:
compute_devices = (worker_device,)
self._device_map = values.ReplicaDeviceMap(compute_devices)
self._input_workers = input_lib.InputWorkers(
self._device_map, [(worker_device, compute_devices)])
# In distributed mode, place variables on ps jobs in a round-robin fashion.
# Note that devices returned from `replica_device_setter` are not
# canonical and therefore we don't canonicalize all variable devices to
# make them consistent.
# TODO(yuefengz): support passing a strategy object to control variable
# assignment.
# TODO(yuefengz): merge the logic of replica_device_setter into this
# class.
num_ps_replicas = len(cluster_spec.as_dict().get("ps", []))
if num_ps_replicas == 0:
raise ValueError("The cluster spec needs to have `ps` jobs.")
self._variable_device = device_setter.replica_device_setter(
ps_tasks=num_ps_replicas,
worker_device=worker_device,
merge_devices=True,
cluster=cluster_spec)
# The `_parameter_devices` is needed for the `parameter_devices` property
# and is a list of all variable devices. Here parameter devices are all
# tasks of the "ps" job.
self._parameter_devices = tuple(map("/job:ps/task:{}".format,
range(num_ps_replicas)))
# Add a default device so that ops without specified devices will not end up
# on other workers.
self._default_device = worker_device
self._is_chief = multi_worker_util.is_chief(cluster_spec, task_type,
task_id)
self._cluster_spec = cluster_spec
self._task_type = task_type
self._task_id = task_id
logging.info(
"Multi-worker ParameterServerStrategy with "
"cluster_spec = %r, task_type = %r, task_id = %r, "
"num_ps_replicas = %r, is_chief = %r, device_map = %r, "
"variable_device = %r", cluster_spec.as_dict(), task_type, task_id,
num_ps_replicas, self._is_chief, self._device_map,
self._variable_device)
def _initialize_local(self, cluster_resolver):
"""Initialize internal devices for local training."""
worker_device = device_util.canonicalize("/device:CPU:0")
self._input_host_device = numpy_dataset.SingleDevice(worker_device)
# TODO(b/126786766): TFConfigClusterResolver returns wrong number of GPUs in
# some cases.
if isinstance(cluster_resolver, TFConfigClusterResolver):
num_gpus = context.num_gpus()
else:
num_gpus = cluster_resolver.num_accelerators().get("GPU", 0)
# Save the num_gpus_per_worker for configure method.
self._num_gpus_per_worker = num_gpus
# Define compute devices which is a list of device strings and one for each
# replica. When there are GPUs, replicate operations on these GPUs.
# Otherwise, place operations on CPU.
if num_gpus > 0:
compute_devices = tuple(map("/device:GPU:{}".format, range(num_gpus)))
else:
compute_devices = (_LOCAL_CPU,)
self._device_map = values.ReplicaDeviceMap(compute_devices)
self._input_workers = input_lib.InputWorkers(
self._device_map, [(worker_device, compute_devices)])
# If there is only one GPU, put everything on that GPU. Otherwise, place
# variables on CPU.
if num_gpus == 1:
assert len(compute_devices) == 1
self._variable_device = _LOCAL_GPU_0
self._parameter_devices = (_LOCAL_GPU_0,)
else:
self._variable_device = _LOCAL_CPU
self._parameter_devices = (_LOCAL_CPU,)
self._is_chief = True
self._cluster_spec = None
self._task_type = None
self._task_id = None
logging.info(
"ParameterServerStrategy with compute_devices = %r, "
"variable_device = %r", compute_devices, self._variable_device)
def _validate_colocate_with_variable(self, colocate_with_variable):
values.validate_colocate(colocate_with_variable, self)
def _make_dataset_iterator(self, dataset):
return input_lib.DatasetIterator(dataset, self._input_workers,
self._num_replicas_in_sync)
def _make_input_fn_iterator(
self,
input_fn,
replication_mode=distribute_lib.InputReplicationMode.PER_WORKER):
"""Distributes the dataset to each local GPU."""
if self._cluster_spec:
input_pipeline_id = multi_worker_util.id_in_cluster(
self._cluster_spec, self._task_type, self._task_id)
num_input_pipelines = multi_worker_util.worker_count(
self._cluster_spec, self._task_type)
else:
input_pipeline_id = 0
num_input_pipelines = 1
input_context = distribute_lib.InputContext(
num_input_pipelines=num_input_pipelines,
input_pipeline_id=input_pipeline_id,
num_replicas_in_sync=self._num_replicas_in_sync)
return input_lib.InputFunctionIterator(input_fn, self._input_workers,
[input_context])
def _experimental_make_numpy_dataset(self, numpy_input, session):
return numpy_dataset.one_host_numpy_dataset(
numpy_input, self._input_host_device, session)
def _broadcast_to(self, tensor, destinations):
# This is both a fast path for Python constants, and a way to delay
# converting Python values to a tensor until we know what type it
# should be converted to. Otherwise we have trouble with:
# global_step.assign_add(1)
# since the `1` gets broadcast as an int32 but global_step is int64.
if isinstance(tensor, (float, int)):
return tensor
if not cross_device_ops_lib.check_destinations(destinations):
# TODO(josh11b): Use current logical device instead of 0 here.
destinations = values.LogicalDeviceSpec(
device_map=self._device_map, logical_device=0)
return self._cross_device_ops.broadcast(tensor, destinations)
def _allow_variable_partition(self):
return not context.executing_eagerly()
# TODO(yuefengz): not all ops in device_setter.STANDARD_PS_OPS will go through
# this creator, such as "MutableHashTable".
def _create_variable(self, next_creator, *args, **kwargs):
if self._num_replicas_in_sync > 1:
aggregation = kwargs.pop("aggregation", vs.VariableAggregation.NONE)
if aggregation not in (
vs.VariableAggregation.NONE,
vs.VariableAggregation.SUM,
vs.VariableAggregation.MEAN,
vs.VariableAggregation.ONLY_FIRST_REPLICA
):
raise ValueError("Invalid variable aggregation mode: " + aggregation +
" for variable: " + kwargs["name"])
def var_creator(*args, **kwargs):
"""Create an AggregatingVariable and fix up collections."""
# Record what collections this variable should be added to.
collections = kwargs.pop("collections", None)
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
kwargs["collections"] = []
# Create and wrap the variable.
v = next_creator(*args, **kwargs)
wrapped = values.AggregatingVariable(
self._container_strategy(), v, aggregation)
# Add the wrapped variable to the requested collections.
# The handling of eager mode and the global step matches
# ResourceVariable._init_from_args().
if not context.executing_eagerly():
g = ops.get_default_graph()
# If "trainable" is True, next_creator() will add the contained
# variable to the TRAINABLE_VARIABLES collection, so we manually
# remove it and replace with the wrapper. We can't set "trainable"
# to False for next_creator() since that causes functions like
# implicit_gradients to skip those variables.
if kwargs.get("trainable", True):
collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
if v in l:
l.remove(v)
g.add_to_collections(collections, wrapped)
elif ops.GraphKeys.GLOBAL_STEP in collections:
ops.add_to_collections(ops.GraphKeys.GLOBAL_STEP, wrapped)
return wrapped
else:
var_creator = next_creator
if "colocate_with" in kwargs:
colocate_with = kwargs["colocate_with"]
if isinstance(colocate_with, numpy_dataset.SingleDevice):
with ops.device(colocate_with.device):
return var_creator(*args, **kwargs)
with ops.device(None):
with ops.colocate_with(colocate_with):
return var_creator(*args, **kwargs)
with ops.colocate_with(None, ignore_existing=True):
with ops.device(self._variable_device):
return var_creator(*args, **kwargs)
def _call_for_each_replica(self, fn, args, kwargs):
# pylint: disable=protected-access
return mirrored_strategy._call_for_each_replica(
self._container_strategy(), self._device_map, fn, args, kwargs)
def _verify_destinations_not_different_worker(self, destinations):
if not self._cluster_spec:
return
if destinations is None:
return
for d in cross_device_ops_lib.get_devices_from(destinations):
d_spec = tf_device.DeviceSpec.from_string(d)
if d_spec.job == self._task_type and d_spec.task != self._task_id:
raise ValueError(
"Cannot reduce to another worker: %r, current worker is %r" %
(d, self._input_workers.worker_devices[0]))
def _reduce_to(self, reduce_op, value, destinations):
self._verify_destinations_not_different_worker(destinations)
if not isinstance(value, values.DistributedValues):
# pylint: disable=protected-access
return cross_device_ops_lib.reduce_non_distributed_value(
reduce_op, self._device_map, value, destinations)
return self._cross_device_ops.reduce(
reduce_op, value, destinations=destinations)
def _batch_reduce_to(self, reduce_op, value_destination_pairs):
for _, destinations in value_destination_pairs:
self._verify_destinations_not_different_worker(destinations)
return self._cross_device_ops.batch_reduce(reduce_op,
value_destination_pairs)
def _select_single_value(self, structured):
"""Select any single values in `structured`."""
def _select_fn(x): # pylint: disable=g-missing-docstring
if isinstance(x, values.Mirrored):
if len(x.devices) == 1:
return x.primary<|fim▁hole|> else:
raise ValueError(
"You cannot update variable with a Mirrored object with multiple "
"components %r when using ParameterServerStrategy. You must "
"specify a single value or a Mirrored with a single value." % x)
elif isinstance(x, values.PerReplica):
raise ValueError(
"You cannot update variable with a PerReplica object %r when using "
"ParameterServerStrategy. You must specify a single value or a "
"Mirrored with a single value" % x)
else:
return x
return nest.map_structure(_select_fn, structured)
def _update(self, var, fn, args, kwargs, group):
if isinstance(var, values.AggregatingVariable):
var = var.get()
if not isinstance(var, resource_variable_ops.ResourceVariable):
raise ValueError(
"You can not update `var` %r. It must be a Variable." % var)
with ops.colocate_with(var), distribute_lib.UpdateContext(var.device):
result = fn(var, *self._select_single_value(args),
**self._select_single_value(kwargs))
if group:
return result
else:
return nest.map_structure(self._local_results, result)
# TODO(yuefengz): does it need to call _select_single_value?
def _update_non_slot(self, colocate_with, fn, args, kwargs, group):
with ops.device(
colocate_with.device), distribute_lib.UpdateContext(colocate_with):
result = fn(*args, **kwargs)
if group:
return result
else:
return nest.map_structure(self._local_results, result)
def _local_results(self, val):
if isinstance(val, values.DistributedValues):
return val.values
return (val,)
def value_container(self, val):
if (hasattr(val, "_aggregating_container") and
not isinstance(val, values.AggregatingVariable)):
wrapper = val._aggregating_container() # pylint: disable=protected-access
if wrapper is not None:
return wrapper
return val
def read_var(self, var):
# No need to distinguish between normal variables and replica-local
# variables.
return array_ops.identity(var)
def _configure(self,
session_config=None,
cluster_spec=None,
task_type=None,
task_id=None):
"""Configures the strategy class.
The strategy object will be re-initialized if `cluster_spec` is given but
was not passed in the constructor.
Args:
session_config: not used currently.
cluster_spec: a dict, ClusterDef or ClusterSpec object specifying the
cluster configurations.
task_type: the current task type.
task_id: the current task id.
Raises:
ValueError: if `cluster_spec` is given but `task_type` or `task_id` is
not.
"""
if cluster_spec:
# Use the num_gpus_per_worker recorded in constructor since _configure
# doesn't take num_gpus.
cluster_resolver = SimpleClusterResolver(
cluster_spec=multi_worker_util.normalize_cluster_spec(cluster_spec),
task_type=task_type,
task_id=task_id,
num_accelerators={"GPU": self._num_gpus_per_worker})
self._initialize_multi_worker(cluster_resolver)
if session_config:
session_config.CopyFrom(self._update_config_proto(session_config))
def _update_config_proto(self, config_proto):
updated_config = copy.deepcopy(config_proto)
if not self._cluster_spec:
updated_config.isolate_session_state = True
return updated_config
updated_config.isolate_session_state = False
assert self._task_type
assert self._task_id is not None
# The device filters prevent communication between workers.
del updated_config.device_filters[:]
if self._task_type in ["chief", "worker"]:
updated_config.device_filters.extend(
["/job:%s/task:%d" % (self._task_type, self._task_id), "/job:ps"])
elif self._task_type == "evaluator":
updated_config.device_filters.append(
"/job:%s/task:%d" % (self._task_type, self._task_id))
return updated_config
@property
def _num_replicas_in_sync(self):
return self._device_map.num_replicas_in_graph
@property
def worker_devices(self):
return self._device_map.all_devices
@property
def worker_devices_by_replica(self):
return self._device_map.devices_by_replica
@property
def parameter_devices(self):
return self._parameter_devices
def non_slot_devices(self, var_list):
return min(var_list, key=lambda x: x.name)
@property
def experimental_between_graph(self):
# TODO(yuefengz): Should this return False in the local case?
return True
@property
def experimental_should_init(self):
return self._is_chief
@property
def should_checkpoint(self):
return self._is_chief
@property
def should_save_summary(self):
return self._is_chief
# TODO(priyag): Delete this once all strategies use global batch size.
@property
def _global_batch_size(self):
"""`make_dataset_iterator` and `make_numpy_iterator` use global batch size.
`make_input_fn_iterator` assumes per-replica batching.
Returns:
Boolean.
"""
return True<|fim▁end|> | |
<|file_name|>website_bolt.py<|end_file_name|><|fim▁begin|><|fim▁hole|>
class WebsiteBolt(ExtractorBolt):
    name = 'website_extractor'
def __init__(self):
ExtractorBolt.__init__(self)
self.extractor = ExtractWebsite()<|fim▁end|> | from extractors.extract_website import ExtractWebsite
from datawakestreams.extractors.extractor_bolt import ExtractorBolt |
<|file_name|>StaticMeshComponent.cpp<|end_file_name|><|fim▁begin|>
#include "Internal.hpp"
#include <LuminoEngine/Graphics/Texture.hpp>
#include <LuminoEngine/Rendering/Material.hpp>
#include <LuminoEngine/Mesh/Mesh.hpp>
#include <LuminoEngine/Visual/StaticMeshComponent.hpp>
namespace ln {
//=============================================================================
// StaticMeshComponent
StaticMeshComponent::StaticMeshComponent()
: m_model(nullptr)
{
}
StaticMeshComponent::~StaticMeshComponent()
{
}
void StaticMeshComponent::init()
{
VisualComponent::init();
}
void StaticMeshComponent::setModel(StaticMeshModel* model)
{
m_model = model;
}
StaticMeshModel* StaticMeshComponent::model() const
{
return m_model;
}
void StaticMeshComponent::onRender(RenderingContext* context)
{
const auto& containers = m_model->meshContainers();
for (int iContainer = 0; iContainer < containers.size(); iContainer++)
{
const auto& meshContainer = containers[iContainer];
MeshResource* meshResource = meshContainer->meshResource();
if (meshResource) {
for (int iSection = 0; iSection < meshResource->sections().size(); iSection++) {
context->setMaterial(m_model->materials()[meshResource->sections()[iSection].materialIndex]);
context->drawMesh(meshResource, iSection);
}
}
//Mesh* mesh = meshContainer->mesh();
//if (mesh) {
// for (int iSection = 0; iSection < mesh->sections().size(); iSection++) {
// context->setMaterial(m_model->materials()[mesh->sections()[iSection].materialIndex]);
// context->drawMesh(mesh, iSection);
// }
//}
}
for (const auto& node : m_model->meshNodes()) {
if (node->meshContainerIndex() >= 0) {
context->setTransfrom(m_model->nodeGlobalTransform(node->index()));
const auto& meshContainer = m_model->meshContainers()[node->meshContainerIndex()];
Mesh* mesh = meshContainer->mesh();
if (mesh) {
for (int iSection = 0; iSection < mesh->sections().size(); iSection++) {
context->setMaterial(m_model->materials()[mesh->sections()[iSection].materialIndex]);
context->drawMesh(mesh, iSection);
}
}
}
}
}
<|fim▁hole|><|fim▁end|> |
} // namespace ln |
<|file_name|>az_completer.py<|end_file_name|><|fim▁begin|># --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
from prompt_toolkit.completion import Completer, Completion
import azclishell.configuration
from azclishell.argfinder import ArgsFinder
from azclishell.command_tree import in_tree
from azclishell.layout import get_scope
from azclishell.util import parse_quotes
from azure.cli.core.parser import AzCliCommandParser
from azure.cli.core.util import CLIError
SELECT_SYMBOL = azclishell.configuration.SELECT_SYMBOL
def dynamic_param_logic(text):
""" validates parameter values for dynamic completion """
is_param = False
started_param = False
prefix = ""
param = ""
txtspt = text.split()
if txtspt:
param = txtspt[-1]
if param.startswith("-"):
is_param = True
elif len(txtspt) > 2 and txtspt[-2]\
and txtspt[-2].startswith('-'):
is_param = True
param = txtspt[-2]
started_param = True
prefix = txtspt[-1]
return is_param, started_param, prefix, param
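# Illustrative example (not from the original module): for the already
# reformatted text "vm create --name my", text.split() is
# ['vm', 'create', '--name', 'my'], so this returns
# (is_param=True, started_param=True, prefix='my', param='--name').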
def reformat_cmd(text):
""" reformat the text to be stripped of noise """
# remove az if there
text = text.replace('az', '')
# disregard defaulting symbols
if text and SELECT_SYMBOL['scope'] == text[0:2]:
text = text.replace(SELECT_SYMBOL['scope'], "")
if get_scope():
text = get_scope() + ' ' + text
return text
def gen_dyn_completion(comp, started_param, prefix, text):
""" how to validate and generate completion for dynamic params """
if len(comp.split()) > 1:
completion = '\"' + comp + '\"'
else:
completion = comp
if started_param:
if comp.lower().startswith(prefix.lower()) and comp not in text.split():
yield Completion(completion, -len(prefix))
else:
yield Completion(completion, -len(prefix))
def sort_completions(gen):
""" sorts the completions """
def _get_weight(val):
""" weights the completions with required things first the lexicographically"""
priority = ''
if val.display_meta and val.display_meta.startswith('[REQUIRED]'):
priority = ' ' # a space has the lowest ordinance
return priority + val.text
return sorted(list(gen), key=_get_weight)
# pylint: disable=too-many-instance-attributes
class AzCompleter(Completer):
""" Completes Azure CLI commands """
def __init__(self, commands, global_params=True):
# dictionary of command to descriptions
self.command_description = commands.descrip
# from a command to a list of parameters
self.command_parameters = commands.command_param
# a list of all the possible parameters
self.completable_param = commands.completable_param
# the command tree
self.command_tree = commands.command_tree
# a dictionary of parameter (which is command + " " + parameter name)
# to a description of what it does
self.param_description = commands.param_descript
# a dictionary of command to examples of how to use it
self.command_examples = commands.command_example
# a dictionary of which parameters mean the same thing
self.same_param_doubles = commands.same_param_doubles or {}
self._is_command = True
self.branch = self.command_tree
self.curr_command = ""
self.global_param = commands.global_param if global_params else []
self.output_choices = commands.output_choices if global_params else []
self.output_options = commands.output_options if global_params else []
self.global_param_descriptions = commands.global_param_descriptions if global_params else []
self.global_parser = AzCliCommandParser(add_help=False)
self.global_parser.add_argument_group('global', 'Global Arguments')
self.parser = AzCliCommandParser(parents=[self.global_parser])
from azclishell._dump_commands import CMD_TABLE
self.cmdtab = CMD_TABLE
self.parser.load_command_table(CMD_TABLE)
self.argsfinder = ArgsFinder(self.parser)
def validate_completion(self, param, words, text_before_cursor, double=True):
""" validates that a param should be completed """
return param.lower().startswith(words.lower()) and param.lower() != words.lower() and\
param not in text_before_cursor.split() and not \
text_before_cursor[-1].isspace() and\
(not (double and param in self.same_param_doubles) or
self.same_param_doubles[param] not in text_before_cursor.split())
def get_completions(self, document, complete_event):
text = document.text_before_cursor
self.branch = self.command_tree
self.curr_command = ''
self._is_command = True
text = reformat_cmd(text)
if text.split():
for comp in sort_completions(self.gen_cmd_and_param_completions(text)):
yield comp
for cmd in sort_completions(self.gen_cmd_completions(text)):
yield cmd
for val in sort_completions(self.gen_dynamic_completions(text)):
yield val
for param in sort_completions(self.gen_global_param_completions(text)):
yield param
def gen_enum_completions(self, arg_name, text, started_param, prefix):
""" generates dynamic enumeration completions """
try: # if enum completion
for choice in self.cmdtab[
self.curr_command].arguments[arg_name].choices:
if started_param:
if choice.lower().startswith(prefix.lower())\
and choice not in text.split():
yield Completion(choice, -len(prefix))
else:
yield Completion(choice, -len(prefix))
except TypeError: # there is no choices option
pass
def get_arg_name(self, is_param, param):
""" gets the argument name used in the command table for a parameter """
if self.curr_command in self.cmdtab and is_param:
for arg in self.cmdtab[self.curr_command].arguments:
for name in self.cmdtab[self.curr_command].arguments[arg].options_list:
if name == param:
return arg
# pylint: disable=too-many-branches
def gen_dynamic_completions(self, text):
""" generates the dynamic values, like the names of resource groups """
try: # pylint: disable=too-many-nested-blocks
is_param, started_param, prefix, param = dynamic_param_logic(text)
# command table specific name
arg_name = self.get_arg_name(is_param, param)
if arg_name and ((text.split()[-1].startswith('-') and text[-1].isspace()) or
text.split()[-2].startswith('-')):
for comp in self.gen_enum_completions(arg_name, text, started_param, prefix):
yield comp
parse_args = self.argsfinder.get_parsed_args(
parse_quotes(text, quotes=False))
# there are 3 formats for completers the cli uses
# this try catches which format it is
if self.cmdtab[self.curr_command].arguments[arg_name].completer:
try:
for comp in self.cmdtab[self.curr_command].arguments[arg_name].completer(
parsed_args=parse_args):
for comp in gen_dyn_completion(
comp, started_param, prefix, text):
yield comp
except TypeError:
try:
for comp in self.cmdtab[self.curr_command].\
arguments[arg_name].completer(prefix):
for comp in gen_dyn_completion(
comp, started_param, prefix, text):
yield comp
except TypeError:
try:
for comp in self.cmdtab[self.curr_command].\
arguments[arg_name].completer():
for comp in gen_dyn_completion(
comp, started_param, prefix, text):
yield comp
except TypeError:
pass # other completion method used
except CLIError: # if the user isn't logged in
pass
def gen_cmd_completions(self, text):
""" whether is a space or no text typed, send the current branch """
# if nothing, so first level commands
if not text.split() and self._is_command:
if self.branch.children is not None:
for com in self.branch.children:
yield Completion(com.data)
# if space show current level commands
elif len(text.split()) > 0 and text[-1].isspace() and self._is_command:
if self.branch is not self.command_tree:
for com in self.branch.children:
yield Completion(com.data)
def yield_param_completion(self, param, last_word):
""" yields a parameter """
return Completion(param, -len(last_word), display_meta=self.get_param_description(
self.curr_command + " " + str(param)).replace('\n', ''))
def gen_cmd_and_param_completions(self, text):
""" generates command and parameter completions """
temp_command = str('')
txtspt = text.split()
<|fim▁hole|> for word in txtspt:
if word.startswith("-"):
self._is_command = False
# building what the command is
elif self._is_command:
temp_command += ' ' + str(word) if temp_command else str(word)
mid_val = text.find(word) + len(word)
# moving down command tree
if self.branch.has_child(word) and len(text) > mid_val and text[mid_val].isspace():
self.branch = self.branch.get_child(word, self.branch.children)
if len(text) > 0 and text[-1].isspace():
if in_tree(self.command_tree, temp_command):
self.curr_command = temp_command
else:
self._is_command = False
else:
self.curr_command = temp_command
last_word = txtspt[-1]
# this is for single char parameters
if last_word.startswith("-") and not last_word.startswith("--"):
self._is_command = False
if self.has_parameters(self.curr_command):
for param in self.command_parameters[self.curr_command]:
if self.validate_completion(param, last_word, text) and\
not param.startswith("--"):
yield self.yield_param_completion(param, last_word)
elif last_word.startswith("--"): # for regular parameters
self._is_command = False
if self.has_parameters(self.curr_command): # Everything should, map to empty list
for param in self.command_parameters[self.curr_command]:
if self.validate_completion(param, last_word, text):
yield self.yield_param_completion(param, last_word)
if self.branch.children and self._is_command: # all underneath commands
for kid in self.branch.children:
if self.validate_completion(kid.data, txtspt[-1], text, False):
yield Completion(
str(kid.data), -len(txtspt[-1]))
elif self._is_command:
for param in self.command_parameters[self.curr_command.strip()]:
if param.startswith('--'):
yield self.yield_param_completion(param, '')
def gen_global_param_completions(self, text):
""" Global parameter stuff hard-coded in """
txtspt = text.split()
if txtspt and len(txtspt) > 0:
for param in self.global_param:
# for single dash global parameters
if txtspt[-1].startswith('-') \
and not txtspt[-1].startswith('--') and \
param.startswith('-') and not param.startswith('--') and\
self.validate_completion(param, txtspt[-1], text, double=False):
yield Completion(
param, -len(txtspt[-1]),
display_meta=self.global_param_descriptions[param])
# for double dash global parameters
elif txtspt[-1].startswith('--') and \
self.validate_completion(param, txtspt[-1], text, double=False):
yield Completion(
param, -len(txtspt[-1]),
display_meta=self.global_param_descriptions[param])
# if there is an output, gets the options without user typing
if txtspt[-1] in self.output_options:
for opt in self.output_choices:
yield Completion(opt)
# if there is an output option, if they have started typing
if len(txtspt) > 1 and\
txtspt[-2] in self.output_options:
for opt in self.output_choices:
if self.validate_completion(opt, txtspt[-1], text, double=False):
yield Completion(opt, -len(txtspt[-1]))
def is_completable(self, symbol):
""" whether the word can be completed as a command or parameter """
return self.has_parameters(symbol) or symbol in self.param_description.keys()
def get_param_description(self, param):
""" gets a description of an empty string """
if param in self.param_description:
return self.param_description[param]
else:
return ""
def has_parameters(self, command):
""" returns whether given command is valid """
return command in self.command_parameters.keys()
def has_description(self, param):
""" if a parameter has a description """
return param in self.param_description.keys() and \
not self.param_description[param].isspace()<|fim▁end|> | |
<|file_name|>nytimes-scrape.py<|end_file_name|><|fim▁begin|>model_search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.response-format?" + \
"[q=search term&" + \
"fq=filter-field:(filter-term)&additional-params=values]" + \
"&api-key=9key"
"""http://api.nytimes.com/svc/search/v2/articlesearch.json?q=terrorism+OR+terrorist
&begin_date=19900102&end_date=19900103&sort=newest&api-key=
key"""
search = "http://api.nytimes.com/svc/search/v2/" + \
"articlesearch.json?" + \
"[q=terror]" + \<|fim▁hole|> "articlesearch.json"
terms = "?q=terrorism+OR+terrorist"
api = "&api-key=key"
print(precise_search + terms + api)  # 'dates' is only defined later, inside the loop below
"""
aggressive for looping in order to overcome the ten article limit. instead search each key word PER JOUR, and then concat the jsons into a nice pandas dataframe, and then eventually a csv.
"""
months_list = ["%.2d" % i for i in range(1,2)]
days_list = ["%.2d" % i for i in range(1,32)]
json_files = []
print(months_list)
for x in months_list:
month_s = x
month_e = x
for y in days_list:
day_s = y
day_e = str(int(y)+1).zfill(2)
year_s = "1990"
year_e = "1990"
start = year_s + month_s + day_s
end = year_e + month_e + day_e
dates = "&begin_date="+start+"&end_date="+end+"&sort=newest"
#print(start + " "+end + "\n" +dates)
r = requests.get(precise_search+terms+dates+api)
original_json = json.loads(r.text)
response_json = original_json['response']
json_file = response_json['docs']
json_files.append(json_file)
frames = []
for x in json_files:
df = pd.DataFrame.from_dict(x)
frames.append(df)
#print(frames)
result = pd.concat(frames)
result<|fim▁end|> | "&api-key=key"
precise_search = "http://api.nytimes.com/svc/search/v2/" + \ |
<|file_name|>raw.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! iOS-specific raw type definitions<|fim▁hole|>use os::raw::c_long;
use os::unix::raw::{uid_t, gid_t};
pub type blkcnt_t = i64;
pub type blksize_t = i32;
pub type dev_t = i32;
pub type ino_t = u64;
pub type mode_t = u16;
pub type nlink_t = u16;
pub type off_t = i64;
pub type time_t = c_long;
#[repr(C)]
pub struct stat {
pub st_dev: dev_t,
pub st_mode: mode_t,
pub st_nlink: nlink_t,
pub st_ino: ino_t,
pub st_uid: uid_t,
pub st_gid: gid_t,
pub st_rdev: dev_t,
pub st_atime: time_t,
pub st_atime_nsec: c_long,
pub st_mtime: time_t,
pub st_mtime_nsec: c_long,
pub st_ctime: time_t,
pub st_ctime_nsec: c_long,
pub st_birthtime: time_t,
pub st_birthtime_nsec: c_long,
pub st_size: off_t,
pub st_blocks: blkcnt_t,
pub st_blksize: blksize_t,
pub st_flags: u32,
pub st_gen: u32,
pub st_lspare: i32,
pub st_qspare: [i64; 2],
}<|fim▁end|> | |
<|file_name|>util.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014, Salesforce.com, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of Salesforce.com nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy
import scipy.stats
from collections import defaultdict
def scores_to_probs(scores):<|fim▁hole|> probs = numpy.exp(scores, out=scores)
probs /= probs.sum()
return probs
def score_to_empirical_kl(score, count):
"""
Convert total log score to KL( empirical || model ),
where the empirical pdf is uniform over `count` datapoints.
"""
count = float(count)
return -score / count - numpy.log(count)
def print_histogram(probs, counts):
WIDTH = 60.0
max_count = max(counts)
print '{: >8} {: >8}'.format('Prob', 'Count')
for prob, count in sorted(zip(probs, counts), reverse=True):
width = int(round(WIDTH * count / max_count))
print '{: >8.3f} {: >8d} {}'.format(prob, count, '-' * width)
def multinomial_goodness_of_fit(
probs,
counts,
total_count,
truncated=False,
plot=False):
"""
Pearson's chi^2 test, on possibly truncated data.
http://en.wikipedia.org/wiki/Pearson%27s_chi-squared_test
Returns:
p-value of truncated multinomial sample.
"""
assert len(probs) == len(counts)
assert truncated or total_count == sum(counts)
chi_squared = 0
dof = 0
if plot:
print_histogram(probs, counts)
for p, c in zip(probs, counts):
if p == 1:
return 1 if c == total_count else 0
assert p < 1, 'bad probability: %g' % p
if p > 0:
mean = total_count * p
variance = total_count * p * (1 - p)
assert variance > 1,\
'WARNING goodness of fit is inaccurate; use more samples'
chi_squared += (c - mean) ** 2 / variance
dof += 1
else:
print 'WARNING zero probability in goodness-of-fit test'
if c > 0:
return float('inf')
if not truncated:
dof -= 1
survival = scipy.stats.chi2.sf(chi_squared, dof)
return survival
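# Example sketch (hypothetical numbers, not from the original test suite):
# a fair three-sided categorical distribution sampled 300 times.
#
#     probs = [1 / 3., 1 / 3., 1 / 3.]
#     counts = [110, 95, 95]
#     p = multinomial_goodness_of_fit(probs, counts, total_count=300)
#
# A p-value near 1 means the counts are consistent with the stated probs;
# a p-value near 0 suggests the sampler is biased.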
def unif01_goodness_of_fit(samples, plot=False):
"""
Bin uniformly distributed samples and apply Pearson's chi^2 test.
"""
samples = numpy.array(samples, dtype=float)
assert samples.min() >= 0.0
assert samples.max() <= 1.0
bin_count = int(round(len(samples) ** 0.333))
assert bin_count >= 7, 'WARNING imprecise test, use more samples'
probs = numpy.ones(bin_count, dtype=numpy.float) / bin_count
counts = numpy.zeros(bin_count, dtype=numpy.int)
for sample in samples:
counts[int(bin_count * sample)] += 1
return multinomial_goodness_of_fit(probs, counts, len(samples), plot=plot)
def density_goodness_of_fit(samples, probs, plot=False):
"""
Transform arbitrary continuous samples to unif01 distribution
and assess goodness of fit via Pearson's chi^2 test.
Inputs:
samples - a list of real-valued samples from a distribution
probs - a list of probability densities evaluated at those samples
"""
assert len(samples) == len(probs)
assert len(samples) > 100, 'WARNING imprecision; use more samples'
pairs = zip(samples, probs)
pairs.sort()
samples = numpy.array([x for x, p in pairs])
probs = numpy.array([p for x, p in pairs])
density = numpy.sqrt(probs[1:] * probs[:-1])
gaps = samples[1:] - samples[:-1]
unif01_samples = 1.0 - numpy.exp(-len(samples) * gaps * density)
return unif01_goodness_of_fit(unif01_samples, plot=plot)
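# Example sketch (assumed usage): check samples against a standard normal
# density; scipy.stats is already imported at module level.
#
#     samples = numpy.random.randn(1000)
#     probs = scipy.stats.norm.pdf(samples)
#     p = density_goodness_of_fit(samples, probs)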
def discrete_goodness_of_fit(
samples,
probs_dict,
truncate_beyond=8,
plot=False):
"""
Transform arbitrary discrete data to multinomial
and assess goodness of fit via Pearson's chi^2 test.
"""
assert len(samples) > 100, 'WARNING imprecision; use more samples'
counts = defaultdict(lambda: 0)
for sample in samples:
assert sample in probs_dict
counts[sample] += 1
items = [(prob, counts.get(i, 0)) for i, prob in probs_dict.iteritems()]
items.sort(reverse=True)
truncated = (truncate_beyond and truncate_beyond < len(items))
if truncated:
items = items[:truncate_beyond]
probs = [prob for prob, count in items]
counts = [count for prob, count in items]
return multinomial_goodness_of_fit(
probs,
counts,
len(samples),
truncated=truncated,
plot=plot)
def bin_samples(samples, k=10, support=[]):
"""
Bins a collection of univariate samples into k bins of equal
fill via the empirical cdf, to be used in goodness of fit testing.
Returns
counts : array k x 1
        bin_ranges : array k x 2
each count is the number of samples in [bin_min, bin_max)
except for the last bin which is [bin_min, bin_max]
list partitioning algorithm adapted from Mark Dickinson:
http://stackoverflow.com/questions/2659900
"""
samples = sorted(samples)
N = len(samples)
q, r = divmod(N, k)
    # we need to distribute the remainder relatively evenly
    # tests will be inaccurate if we have small bins at the end
indices = [i * q + min(r, i) for i in range(k + 1)]
bins = [samples[indices[i]: indices[i + 1]] for i in range(k)]
bin_ranges = []
counts = []
for i in range(k):
bin_min = bins[i][0]
try:
bin_max = bins[i + 1][0]
except IndexError:
bin_max = bins[i][-1]
bin_ranges.append([bin_min, bin_max])
counts.append(len(bins[i]))
if support:
bin_ranges[0][0] = support[0]
bin_ranges[-1][1] = support[1]
return numpy.array(counts), numpy.array(bin_ranges)
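# Example sketch (hypothetical input): the twenty sorted values 0..19 with
# k=4 give counts == [5, 5, 5, 5] and bin_ranges
# [[0, 5], [5, 10], [10, 15], [15, 19]].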
def histogram(samples, bin_count=None):
if bin_count is None:
bin_count = numpy.max(samples) + 1
v = numpy.zeros(bin_count, dtype=int)
for sample in samples:
v[sample] += 1
return v<|fim▁end|> | scores = numpy.array(scores)
scores -= scores.max() |
<|file_name|>cotisations.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import division<|fim▁hole|>from openfisca_core import reforms
from openfisca_france.model.base import FloatCol, Individus, Variable
# Build function
def build_reform(tax_benefit_system):
Reform = reforms.make_reform(
key = 'revenu_de_base_cotisations',
name = u"Réforme des cotisations pour un Revenu de base",
reference = tax_benefit_system,
)
class cotisations_contributives(Variable):
column = FloatCol
entity_class = Individus
label = u"Nouvelles cotisations contributives"
def function(self, simulation, period):
ags = simulation.calculate('ags', period)
agff_tranche_a_employeur = simulation.calculate('agff_tranche_a_employeur', period)
apec_employeur = simulation.calculate('apec_employeur', period)
arrco_tranche_a_employeur = simulation.calculate('arrco_tranche_a_employeur', period)
assedic_employeur = simulation.calculate('assedic_employeur', period)
cotisation_exceptionnelle_temporaire_employeur = simulation.calculate(
'cotisation_exceptionnelle_temporaire_employeur', period)
fonds_emploi_hospitalier = simulation.calculate('fonds_emploi_hospitalier', period)
ircantec_employeur = simulation.calculate('ircantec_employeur', period)
pension_civile_employeur = simulation.calculate('pension_civile_employeur', period)
prevoyance_obligatoire_cadre = simulation.calculate('prevoyance_obligatoire_cadre', period)
rafp_employeur = simulation.calculate('rafp_employeur', period)
vieillesse_deplafonnee_employeur = simulation.calculate('vieillesse_deplafonnee_employeur', period)
vieillesse_plafonnee_employeur = simulation.calculate('vieillesse_plafonnee_employeur', period)
allocations_temporaires_invalidite = simulation.calculate('allocations_temporaires_invalidite', period)
accident_du_travail = simulation.calculate('accident_du_travail', period)
agff_tranche_a_employe = simulation.calculate('agff_tranche_a_employe', period)
agirc_tranche_b_employe = simulation.calculate('agirc_tranche_b_employe', period)
apec_employe = simulation.calculate('apec_employe', period)
arrco_tranche_a_employe = simulation.calculate('arrco_tranche_a_employe', period)
assedic_employe = simulation.calculate('assedic_employe', period)
cotisation_exceptionnelle_temporaire_employe = simulation.calculate(
'cotisation_exceptionnelle_temporaire_employe', period)
ircantec_employe = simulation.calculate('ircantec_employe', period)
pension_civile_employe = simulation.calculate('pension_civile_employe', period)
rafp_employe = simulation.calculate('rafp_employe', period)
vieillesse_deplafonnee_employe = simulation.calculate('vieillesse_deplafonnee_employe', period)
vieillesse_plafonnee_employe = simulation.calculate('vieillesse_plafonnee_employe', period)
cotisations_contributives = (
                # employer contributory contributions in the private sector
ags +
agff_tranche_a_employeur +
apec_employeur +
arrco_tranche_a_employeur +
assedic_employeur +
cotisation_exceptionnelle_temporaire_employeur +
                prevoyance_obligatoire_cadre +  # TODO: contributory or not?
vieillesse_deplafonnee_employeur +
vieillesse_plafonnee_employeur +
                # employer contributory contributions in the public sector
fonds_emploi_hospitalier +
ircantec_employeur +
pension_civile_employeur +
rafp_employeur +
                # former non-contributory employer contributions, classified here as contributory
allocations_temporaires_invalidite +
accident_du_travail +
                # former contributory employee contributions in the private sector
agff_tranche_a_employe +
agirc_tranche_b_employe +
apec_employe +
arrco_tranche_a_employe +
assedic_employe +
cotisation_exceptionnelle_temporaire_employe +
vieillesse_deplafonnee_employe +
vieillesse_plafonnee_employe +
                # former contributory employee contributions in the public sector
ircantec_employe +
pension_civile_employe +
rafp_employe
)
return period, cotisations_contributives
class nouv_salaire_de_base(Variable):
reference = tax_benefit_system.column_by_name['salaire_de_base']
        # In this reform, gross pay is defined as the employer's total cost
        # ("super-gross" pay) minus the contributory social contributions
def function(self, simulation, period):
period = period.start.period('month').offset('first-of')
salsuperbrut = simulation.calculate('salsuperbrut', period)
cotisations_contributives = simulation.calculate('cotisations_contributives', period)
nouv_salaire_de_base = (
salsuperbrut -
cotisations_contributives
)
return period, nouv_salaire_de_base
class nouv_csg(Variable):
reference = tax_benefit_system.column_by_name['csg_imposable_salaire']
        # a single CSG rate of 22.5% is applied; it funds all non-contributory benefits
def function(self, simulation, period):
period = period.start.period('month').offset('first-of')
nouv_salaire_de_base = simulation.calculate('nouv_salaire_de_base', period)
nouv_csg = (
-0.225 * nouv_salaire_de_base
)
return period, nouv_csg
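    # Numerical illustration (not in the original source): with the single
    # 22.5% CSG above, a nouv_salaire_de_base of 2000 gives nouv_csg = -450.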
class salaire_net(Variable):
reference = tax_benefit_system.column_by_name['salaire_net']
        # subtract the new CSG (not the one funding the basic income) to obtain the new net pay
def function(self, simulation, period):
period = period.start.period('month').offset('first-of')
nouv_salaire_de_base = simulation.calculate('nouv_salaire_de_base', period)
nouv_csg = simulation.calculate('nouv_csg', period)
salaire_net = (
nouv_salaire_de_base +
nouv_csg
)
return period, salaire_net
class salaire_imposable(Variable):
reference = tax_benefit_system.column_by_name['salaire_imposable']
        # starting from the new net pay; compared with the current taxable pay,
        # overtime hours and the deductibility of CSG have been removed
def function(self, simulation, period):
period = period
hsup = simulation.calculate('hsup', period)
salaire_net = simulation.calculate('salaire_net', period)
primes_fonction_publique = simulation.calculate('primes_fonction_publique', period)
indemnite_residence = simulation.calculate('indemnite_residence', period)
supp_familial_traitement = simulation.calculate('supp_familial_traitement', period)
rev_microsocial_declarant1 = simulation.calculate('rev_microsocial_declarant1', period)
return period, (
salaire_net +
primes_fonction_publique +
indemnite_residence +
supp_familial_traitement +
hsup +
rev_microsocial_declarant1
)
return Reform()<|fim▁end|> | |
<|file_name|>angular-cookies.js<|end_file_name|><|fim▁begin|>/**
* @license AngularJS v1.3.0-build.2690+sha.be7c02c
* (c) 2010-2014 Google, Inc. http://angularjs.org
* License: MIT
*/
(function(window, angular, undefined) {'use strict';
/**
* @ngdoc module
* @name ngCookies
* @description
*
* # ngCookies
*
* The `ngCookies` module provides a convenient wrapper for reading and writing browser cookies.
*
*
* <div doc-module-components="ngCookies"></div>
*
* See {@link ngCookies.$cookies `$cookies`} and
* {@link ngCookies.$cookieStore `$cookieStore`} for usage.
*/
angular.module('ngCookies', ['ng']).
/**
* @ngdoc service
* @name $cookies
*
* @description
* Provides read/write access to browser's cookies.
*
* Only a simple Object is exposed and by adding or removing properties to/from this object, new
* cookies are created/deleted at the end of current $eval.
* The object's properties can only be strings.
*
* Requires the {@link ngCookies `ngCookies`} module to be installed.
*
* @example
*
* ```js
* function ExampleController($cookies) {
* // Retrieving a cookie
* var favoriteCookie = $cookies.myFavorite;
* // Setting a cookie
* $cookies.myFavorite = 'oatmeal';
* }
* ```
*/
factory('$cookies', ['$rootScope', '$browser', function ($rootScope, $browser) {
var cookies = {},
lastCookies = {},
lastBrowserCookies,
runEval = false,
copy = angular.copy,
isUndefined = angular.isUndefined;
//creates a poller fn that copies all cookies from the $browser to service & inits the service
$browser.addPollFn(function() {
var currentCookies = $browser.cookies();
if (lastBrowserCookies != currentCookies) { //relies on browser.cookies() impl
lastBrowserCookies = currentCookies;
copy(currentCookies, lastCookies);
copy(currentCookies, cookies);
if (runEval) $rootScope.$apply();
}
})();
runEval = true;
//at the end of each eval, push cookies
//TODO: this should happen before the "delayed" watches fire, because if some cookies are not
// strings or browser refuses to store some cookies, we update the model in the push fn.
$rootScope.$watch(push);
return cookies;
/**
* Pushes all the cookies from the service to the browser and verifies if all cookies were
* stored.
*/
function push() {
var name,
value,
browserCookies,
updated;
//delete any cookies deleted in $cookies
for (name in lastCookies) {
if (isUndefined(cookies[name])) {
$browser.cookies(name, undefined);
}
}
//update all cookies updated in $cookies
for(name in cookies) {
value = cookies[name];
if (!angular.isString(value)) {
value = '' + value;
cookies[name] = value;
}
if (value !== lastCookies[name]) {
$browser.cookies(name, value);
updated = true;
}
}
//verify what was actually stored
if (updated){
updated = false;
browserCookies = $browser.cookies();
for (name in cookies) {
if (cookies[name] !== browserCookies[name]) {
//delete or reset all cookies that the browser dropped from $cookies
if (isUndefined(browserCookies[name])) {
delete cookies[name];
} else {
cookies[name] = browserCookies[name];
}
updated = true;
}
}
}
}
}]).
/**
* @ngdoc service<|fim▁hole|> * @name $cookieStore
* @requires $cookies
*
* @description
* Provides a key-value (string-object) storage, that is backed by session cookies.
* Objects put or retrieved from this storage are automatically serialized or
* deserialized by angular's toJson/fromJson.
*
* Requires the {@link ngCookies `ngCookies`} module to be installed.
*
* @example
*
* ```js
* function ExampleController($cookieStore) {
* // Put cookie
* $cookieStore.put('myFavorite','oatmeal');
* // Get cookie
* var favoriteCookie = $cookieStore.get('myFavorite');
* // Removing a cookie
* $cookieStore.remove('myFavorite');
* }
* ```
*/
factory('$cookieStore', ['$cookies', function($cookies) {
return {
/**
* @ngdoc method
* @name $cookieStore#get
*
* @description
* Returns the value of given cookie key
*
* @param {string} key Id to use for lookup.
* @returns {Object} Deserialized cookie value.
*/
get: function(key) {
var value = $cookies[key];
return value ? angular.fromJson(value) : value;
},
/**
* @ngdoc method
* @name $cookieStore#put
*
* @description
* Sets a value for given cookie key
*
* @param {string} key Id for the `value`.
* @param {Object} value Value to be stored.
*/
put: function(key, value) {
$cookies[key] = angular.toJson(value);
},
/**
* @ngdoc method
* @name $cookieStore#remove
*
* @description
* Remove given cookie
*
* @param {string} key Id of the key-value pair to delete.
*/
remove: function(key) {
delete $cookies[key];
}
};
}]);
})(window, window.angular);<|fim▁end|> | |
<|file_name|>bootstrap.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core';<|fim▁hole|>@Component({selector: 'my-app', template: 'Hello {{ name }}!'})
class MyApp {
name: string = 'World';
}
function main() {
return bootstrap(MyApp);
}
// #enddocregion<|fim▁end|> | import {bootstrap} from '@angular/platform-browser';
// #docregion bootstrap |
<|file_name|>callback.rs<|end_file_name|><|fim▁begin|>use std::mem;
use std::ptr;
use std::cell::RefCell;
use std::sync::mpsc::Sender;
use std::sync::{Arc, Mutex};
use std::ffi::OsString;
use std::os::windows::ffi::OsStringExt;
use CursorState;
use Event;
use super::event;
use user32;
use shell32;
use winapi;
/// There are no parameters passed to the callback function, so it needs to get
/// its context (the HWND, the Sender for events, etc.) stashed in
/// a thread-local variable.
thread_local!(pub static CONTEXT_STASH: RefCell<Option<ThreadLocalData>> = RefCell::new(None));
pub struct ThreadLocalData {
pub win: winapi::HWND,
pub sender: Sender<Event>,
pub cursor_state: Arc<Mutex<CursorState>>
}
/// Checks that the window is the right one, and if so sends the event to it.
fn send_event(input_window: winapi::HWND, event: Event) {
CONTEXT_STASH.with(|context_stash| {
let context_stash = context_stash.borrow();
let stored = match *context_stash {
None => return,
Some(ref v) => v
};
let &ThreadLocalData { ref win, ref sender, .. } = stored;
if win != &input_window {
return;
}
sender.send(event).ok(); // ignoring if closed
});
}
/// This is the callback that is called by `DispatchMessage` in the events loop.
///
/// Returning 0 tells the Win32 API that the message has been processed.
// FIXME: detect WM_DWMCOMPOSITIONCHANGED and call DwmEnableBlurBehindWindow if necessary
pub unsafe extern "system" fn callback(window: winapi::HWND, msg: winapi::UINT,
wparam: winapi::WPARAM, lparam: winapi::LPARAM)
-> winapi::LRESULT
{
match msg {
winapi::WM_DESTROY => {
use events::Event::Closed;
CONTEXT_STASH.with(|context_stash| {
let context_stash = context_stash.borrow();
let stored = match *context_stash {
None => return,
Some(ref v) => v
};
let &ThreadLocalData { ref win, .. } = stored;
if win == &window {
user32::PostQuitMessage(0);
}
});
send_event(window, Closed);
0
},
winapi::WM_ERASEBKGND => {
1
},
winapi::WM_SIZE => {
use events::Event::Resized;
let w = winapi::LOWORD(lparam as winapi::DWORD) as u32;
let h = winapi::HIWORD(lparam as winapi::DWORD) as u32;
send_event(window, Resized(w, h));
0
},
winapi::WM_MOVE => {
use events::Event::Moved;
let x = winapi::LOWORD(lparam as winapi::DWORD) as i32;
let y = winapi::HIWORD(lparam as winapi::DWORD) as i32;
send_event(window, Moved(x, y));
0
},
winapi::WM_CHAR => {
use std::mem;
use events::Event::ReceivedCharacter;
let chr: char = mem::transmute(wparam as u32);
send_event(window, ReceivedCharacter(chr));
0
},
// Prevents default windows menu hotkeys playing unwanted
// "ding" sounds. Alternatively could check for WM_SYSCOMMAND
// with wparam being SC_KEYMENU, but this may prevent some
// other unwanted default hotkeys as well.
winapi::WM_SYSCHAR => {
0
}
winapi::WM_MOUSEMOVE => {
use events::Event::MouseMoved;
let x = winapi::GET_X_LPARAM(lparam) as i32;
let y = winapi::GET_Y_LPARAM(lparam) as i32;
send_event(window, MouseMoved((x, y)));
0
},
winapi::WM_MOUSEWHEEL => {
use events::Event::MouseWheel;
use events::MouseScrollDelta::LineDelta;
let value = (wparam >> 16) as i16;
let value = value as i32;
let value = value as f32 / winapi::WHEEL_DELTA as f32;
send_event(window, MouseWheel(LineDelta(0.0, value)));
0
},
winapi::WM_KEYDOWN | winapi::WM_SYSKEYDOWN => {
use events::Event::KeyboardInput;
use events::ElementState::Pressed;
if msg == winapi::WM_SYSKEYDOWN && wparam as i32 == winapi::VK_F4 {
user32::DefWindowProcW(window, msg, wparam, lparam)
} else {
let (scancode, vkey) = event::vkeycode_to_element(wparam, lparam);
send_event(window, KeyboardInput(Pressed, scancode, vkey));
0
}
},
winapi::WM_KEYUP | winapi::WM_SYSKEYUP => {
use events::Event::KeyboardInput;
use events::ElementState::Released;
let (scancode, vkey) = event::vkeycode_to_element(wparam, lparam);
send_event(window, KeyboardInput(Released, scancode, vkey));
0
},
winapi::WM_LBUTTONDOWN => {
use events::Event::MouseInput;
use events::MouseButton::Left;
use events::ElementState::Pressed;
send_event(window, MouseInput(Pressed, Left));
0
},
winapi::WM_LBUTTONUP => {
use events::Event::MouseInput;
use events::MouseButton::Left;
use events::ElementState::Released;
send_event(window, MouseInput(Released, Left));
0<|fim▁hole|>
winapi::WM_RBUTTONDOWN => {
use events::Event::MouseInput;
use events::MouseButton::Right;
use events::ElementState::Pressed;
send_event(window, MouseInput(Pressed, Right));
0
},
winapi::WM_RBUTTONUP => {
use events::Event::MouseInput;
use events::MouseButton::Right;
use events::ElementState::Released;
send_event(window, MouseInput(Released, Right));
0
},
winapi::WM_MBUTTONDOWN => {
use events::Event::MouseInput;
use events::MouseButton::Middle;
use events::ElementState::Pressed;
send_event(window, MouseInput(Pressed, Middle));
0
},
winapi::WM_MBUTTONUP => {
use events::Event::MouseInput;
use events::MouseButton::Middle;
use events::ElementState::Released;
send_event(window, MouseInput(Released, Middle));
0
},
winapi::WM_INPUT => {
let mut data: winapi::RAWINPUT = mem::uninitialized();
let mut data_size = mem::size_of::<winapi::RAWINPUT>() as winapi::UINT;
user32::GetRawInputData(mem::transmute(lparam), winapi::RID_INPUT,
mem::transmute(&mut data), &mut data_size,
mem::size_of::<winapi::RAWINPUTHEADER>() as winapi::UINT);
if data.header.dwType == winapi::RIM_TYPEMOUSE {
let _x = data.mouse.lLastX; // FIXME: this is not always the relative movement
let _y = data.mouse.lLastY;
// TODO:
//send_event(window, Event::MouseRawMovement { x: x, y: y });
0
} else {
user32::DefWindowProcW(window, msg, wparam, lparam)
}
},
winapi::WM_SETFOCUS => {
use events::Event::Focused;
send_event(window, Focused(true));
0
},
winapi::WM_KILLFOCUS => {
use events::Event::Focused;
send_event(window, Focused(false));
0
},
winapi::WM_SETCURSOR => {
CONTEXT_STASH.with(|context_stash| {
let cstash = context_stash.borrow();
let cstash = cstash.as_ref();
// there's a very bizarre borrow checker bug
// possibly related to rust-lang/rust/#23338
let _cursor_state = if let Some(cstash) = cstash {
if let Ok(cursor_state) = cstash.cursor_state.lock() {
match *cursor_state {
CursorState::Normal => {
user32::SetCursor(user32::LoadCursorW(
ptr::null_mut(),
winapi::IDC_ARROW));
},
CursorState::Grab | CursorState::Hide => {
user32::SetCursor(ptr::null_mut());
}
}
}
} else {
return
};
// let &ThreadLocalData { ref cursor_state, .. } = stored;
});
0
},
winapi::WM_DROPFILES => {
use events::Event::DroppedFile;
let hdrop = wparam as winapi::HDROP;
let mut pathbuf: [u16; winapi::MAX_PATH] = mem::uninitialized();
let num_drops = shell32::DragQueryFileW(hdrop, 0xFFFFFFFF, ptr::null_mut(), 0);
for i in 0..num_drops {
let nch = shell32::DragQueryFileW(hdrop, i, pathbuf.as_mut_ptr(),
winapi::MAX_PATH as u32) as usize;
if nch > 0 {
send_event(window, DroppedFile(OsString::from_wide(&pathbuf[0..nch]).into()));
}
}
shell32::DragFinish(hdrop);
0
},
_ => {
user32::DefWindowProcW(window, msg, wparam, lparam)
}
}
}<|fim▁end|> | }, |
<|file_name|>VirtualMachineRestrictMovementState.java<|end_file_name|><|fim▁begin|>// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.avs.models;
import com.azure.core.util.ExpandableStringEnum;
import com.fasterxml.jackson.annotation.JsonCreator;
import java.util.Collection;
/** Defines values for VirtualMachineRestrictMovementState. */
public final class VirtualMachineRestrictMovementState
extends ExpandableStringEnum<VirtualMachineRestrictMovementState> {
/** Static value Enabled for VirtualMachineRestrictMovementState. */
public static final VirtualMachineRestrictMovementState ENABLED = fromString("Enabled");
/** Static value Disabled for VirtualMachineRestrictMovementState. */
public static final VirtualMachineRestrictMovementState DISABLED = fromString("Disabled");
/**
* Creates or finds a VirtualMachineRestrictMovementState from its string representation.
*
* @param name a name to look for.
* @return the corresponding VirtualMachineRestrictMovementState.
*/
@JsonCreator
public static VirtualMachineRestrictMovementState fromString(String name) {
return fromString(name, VirtualMachineRestrictMovementState.class);
}<|fim▁hole|> return values(VirtualMachineRestrictMovementState.class);
}
}<|fim▁end|> |
/** @return known VirtualMachineRestrictMovementState values. */
public static Collection<VirtualMachineRestrictMovementState> values() { |
<|file_name|>che-clipboard.directive.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2015-2017 Red Hat, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Red Hat, Inc. - initial API and implementation
*/
'use strict';
interface ICheClipboardScope extends ng.IScope {
value: string;
isCopied: boolean;
onClick: Function;
}
/**
* Defines a directive for the clipboard.
* @author Oleksii Orel
*/
export class CheClipboard implements ng.IDirective {
restrict = 'E';
replace = true;
templateUrl = 'components/widget/copy-clipboard/che-clipboard.html';
scope = {
value: '=cheValue'
};
private $window: ng.IWindowService;
private $log: ng.ILogService;
/**
* Default constructor that is using resource
* @ngInject for Dependency injection
*/
constructor($window: ng.IWindowService, $log: ng.ILogService) {
this.$window = $window;
this.$log = $log;
}
link($scope: ICheClipboardScope, $element: ng.IAugmentedJQuery): void {
const clipboardIconJq = $element.find('.copy-clipboard-area');
const invInputJq = $element.find('input');
$scope.onClick = () => {
invInputJq.select();
const copy = 'copy';
if (this.$window.document.queryCommandSupported(copy)) {
try {
const isCopied = this.$window.document.execCommand(copy);
if (isCopied) {
this.$window.getSelection().removeAllRanges();
clipboardIconJq.focus();
$scope.isCopied = true;
}
} catch (error) {
this.$log.error('Error. ' + error);
}
}
};<|fim▁hole|><|fim▁end|> | }
} |
<|file_name|>symmetric.go<|end_file_name|><|fim▁begin|>// Copyright ©2015 The gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package mat64
import (
"math"
"github.com/gonum/blas"
"github.com/gonum/blas/blas64"
"github.com/gonum/matrix"
)
var (
symDense *SymDense
_ Matrix = symDense
_ Symmetric = symDense
_ RawSymmetricer = symDense
_ MutableSymmetric = symDense
)
const (
badSymTriangle = "mat64: blas64.Symmetric not upper"
badSymCap = "mat64: bad capacity for SymDense"
)
// SymDense is a symmetric matrix that uses dense storage. SymDense
// matrices are stored in the upper triangle.
type SymDense struct {
mat blas64.Symmetric
cap int
}
// Symmetric represents a symmetric matrix (where the element at {i, j} equals
// the element at {j, i}). Symmetric matrices are always square.
type Symmetric interface {
Matrix
// Symmetric returns the number of rows/columns in the matrix.
Symmetric() int
}
// A RawSymmetricer can return a view of itself as a BLAS Symmetric matrix.
type RawSymmetricer interface {
RawSymmetric() blas64.Symmetric
}
type MutableSymmetric interface {
Symmetric
SetSym(i, j int, v float64)
}
// NewSymDense creates a new Symmetric matrix with n rows and columns. If data == nil,
// a new slice is allocated for the backing slice. If len(data) == n*n, data is
// used as the backing slice, and changes to the elements of the returned SymDense
// will be reflected in data. If neither of these is true, NewSymDense will panic.
//
// The data must be arranged in row-major order, i.e. the (i*c + j)-th
// element in the data slice is the {i, j}-th element in the matrix.
// Only the values in the upper triangular portion of the matrix are used.
func NewSymDense(n int, data []float64) *SymDense {
if n < 0 {
panic("mat64: negative dimension")
}
if data != nil && n*n != len(data) {
panic(matrix.ErrShape)
}
if data == nil {
data = make([]float64, n*n)
}
return &SymDense{
mat: blas64.Symmetric{
N: n,
Stride: n,
Data: data,
Uplo: blas.Upper,
},
cap: n,
}
}
func (s *SymDense) Dims() (r, c int) {
return s.mat.N, s.mat.N
}
// T implements the Matrix interface. Symmetric matrices, by definition, are
// equal to their transpose, and this is a no-op.
func (s *SymDense) T() Matrix {
return s
}
func (s *SymDense) Symmetric() int {
return s.mat.N
}
// RawSymmetric returns the matrix as a blas64.Symmetric. The returned
// value must be stored in upper triangular format.
func (s *SymDense) RawSymmetric() blas64.Symmetric {
return s.mat
}
// SetRawSymmetric sets the underlying blas64.Symmetric used by the receiver.
// Changes to elements in the receiver following the call will be reflected
// in b. SetRawSymmetric will panic if b is not an upper-encoded symmetric
// matrix.
func (s *SymDense) SetRawSymmetric(b blas64.Symmetric) {
if b.Uplo != blas.Upper {
panic(badSymTriangle)
}
s.mat = b
}
// Reset zeros the dimensions of the matrix so that it can be reused as the
// receiver of a dimensionally restricted operation.
//
// See the Reseter interface for more information.
func (s *SymDense) Reset() {
// N and Stride must be zeroed in unison.
s.mat.N, s.mat.Stride = 0, 0
s.mat.Data = s.mat.Data[:0]
}
func (s *SymDense) isZero() bool {
	// It must be the case that s.Dims() returns
// zeros in this case. See comment in Reset().
return s.mat.N == 0
}
// reuseAs resizes an empty matrix to a n×n matrix,
// or checks that a non-empty matrix is n×n.
func (s *SymDense) reuseAs(n int) {
if s.mat.N > s.cap {
panic(badSymCap)
}
if s.isZero() {
s.mat = blas64.Symmetric{
N: n,
Stride: n,
Data: use(s.mat.Data, n*n),
Uplo: blas.Upper,
}
s.cap = n
return
}
if s.mat.Uplo != blas.Upper {
panic(badSymTriangle)
}
if s.mat.N != n {
panic(matrix.ErrShape)
}
}
func (s *SymDense) isolatedWorkspace(a Symmetric) (w *SymDense, restore func()) {
n := a.Symmetric()
w = getWorkspaceSym(n, false)
return w, func() {
s.CopySym(w)
putWorkspaceSym(w)
}
}
func (s *SymDense) AddSym(a, b Symmetric) {
n := a.Symmetric()
if n != b.Symmetric() {
panic(matrix.ErrShape)
}
s.reuseAs(n)
if a, ok := a.(RawSymmetricer); ok {
if b, ok := b.(RawSymmetricer); ok {
amat, bmat := a.RawSymmetric(), b.RawSymmetric()
if s != a {
s.checkOverlap(amat)
}
if s != b {
s.checkOverlap(bmat)
}
for i := 0; i < n; i++ {
btmp := bmat.Data[i*bmat.Stride+i : i*bmat.Stride+n]
stmp := s.mat.Data[i*s.mat.Stride+i : i*s.mat.Stride+n]
for j, v := range amat.Data[i*amat.Stride+i : i*amat.Stride+n] {
stmp[j] = v + btmp[j]
}
}
return
}
}
for i := 0; i < n; i++ {
stmp := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n]
for j := i; j < n; j++ {
stmp[j] = a.At(i, j) + b.At(i, j)
}
}
}
func (s *SymDense) CopySym(a Symmetric) int {
n := a.Symmetric()
n = min(n, s.mat.N)
if n == 0 {
return 0
}
switch a := a.(type) {
case RawSymmetricer:
amat := a.RawSymmetric()
if amat.Uplo != blas.Upper {
panic(badSymTriangle)
}
for i := 0; i < n; i++ {
copy(s.mat.Data[i*s.mat.Stride+i:i*s.mat.Stride+n], amat.Data[i*amat.Stride+i:i*amat.Stride+n])
}
default:
for i := 0; i < n; i++ {
stmp := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n]
for j := i; j < n; j++ {
stmp[j] = a.At(i, j)
}
}
}
return n
}
// SymRankOne performs a symmetric rank-one update to the matrix a and stores
// the result in the receiver
// s = a + alpha * x * x'
func (s *SymDense) SymRankOne(a Symmetric, alpha float64, x *Vector) {
n := x.Len()
if a.Symmetric() != n {
panic(matrix.ErrShape)
}
s.reuseAs(n)
if s != a {
if rs, ok := a.(RawSymmetricer); ok {
s.checkOverlap(rs.RawSymmetric())
}
s.CopySym(a)
}
blas64.Syr(alpha, x.mat, s.mat)
}
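// Illustrative sketch of a call (example values are hypothetical, not taken
// from the original source):
//
//	a := NewSymDense(2, []float64{
//		4, 1,
//		1, 3,
//	})
//	x := NewVector(2, []float64{1, 2})
//	var s SymDense
//	s.SymRankOne(a, 0.5, x) // s = a + 0.5 * x * x'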
// SymRankK performs a symmetric rank-k update to the matrix a and stores the
// result into the receiver. If a is zero, see SymOuterK.
// s = a + alpha * x * x'
func (s *SymDense) SymRankK(a Symmetric, alpha float64, x Matrix) {
n := a.Symmetric()
r, _ := x.Dims()
if r != n {
panic(matrix.ErrShape)
}
xMat, aTrans := untranspose(x)
var g blas64.General
if rm, ok := xMat.(RawMatrixer); ok {
g = rm.RawMatrix()
} else {
g = DenseCopyOf(x).mat
aTrans = false
}
if a != s {
if rs, ok := a.(RawSymmetricer); ok {
s.checkOverlap(rs.RawSymmetric())
}
s.reuseAs(n)
s.CopySym(a)
}
t := blas.NoTrans
if aTrans {
t = blas.Trans
}
blas64.Syrk(t, alpha, g, 1, s.mat)
}
// SymOuterK calculates the outer product of x with itself and stores
// the result into the receiver. It is equivalent to the matrix
// multiplication
// s = alpha * x * x'.
// In order to update an existing matrix, see SymRankOne.
func (s *SymDense) SymOuterK(alpha float64, x Matrix) {
n, _ := x.Dims()
switch {
case s.isZero():
s.mat = blas64.Symmetric{
N: n,
Stride: n,
Data: useZeroed(s.mat.Data, n*n),
Uplo: blas.Upper,
}
s.cap = n
s.SymRankK(s, alpha, x)
case s.mat.Uplo != blas.Upper:
panic(badSymTriangle)
case s.mat.N == n:
if s == x {
w := getWorkspaceSym(n, true)
w.SymRankK(w, alpha, x)
s.CopySym(w)
putWorkspaceSym(w)
} else {
if rs, ok := x.(RawSymmetricer); ok {
s.checkOverlap(rs.RawSymmetric())
}
// Only zero the upper triangle.
for i := 0; i < n; i++ {
ri := i * s.mat.Stride
zero(s.mat.Data[ri+i : ri+n])
}
s.SymRankK(s, alpha, x)
}
default:
panic(matrix.ErrShape)
}
}
// RankTwo performs a symmetric rank-two update to the matrix a and stores
// the result in the receiver
// m = a + alpha * (x * y' + y * x')
func (s *SymDense) RankTwo(a Symmetric, alpha float64, x, y *Vector) {
n := s.mat.N
if x.Len() != n {
panic(matrix.ErrShape)
}
if y.Len() != n {
panic(matrix.ErrShape)
}
var w SymDense
if s == a {
w = *s
}
w.reuseAs(n)
if s != a {
if rs, ok := a.(RawSymmetricer); ok {
s.checkOverlap(rs.RawSymmetric())
}
w.CopySym(a)
}
blas64.Syr2(alpha, x.mat, y.mat, w.mat)
*s = w
return
}
// ScaleSym multiplies the elements of a by f, placing the result in the receiver.
func (s *SymDense) ScaleSym(f float64, a Symmetric) {
n := a.Symmetric()
s.reuseAs(n)
if a, ok := a.(RawSymmetricer); ok {
amat := a.RawSymmetric()
if s != a {
s.checkOverlap(amat)
}
for i := 0; i < n; i++ {
for j := i; j < n; j++ {
s.mat.Data[i*s.mat.Stride+j] = f * amat.Data[i*amat.Stride+j]
}
}
return
}
for i := 0; i < n; i++ {
for j := i; j < n; j++ {
s.mat.Data[i*s.mat.Stride+j] = f * a.At(i, j)
}
}
}
// SubsetSym extracts a subset of the rows and columns of the matrix a and stores
// the result in-place into the receiver. The resulting matrix size is
// len(set)×len(set). Specifically, at the conclusion of SubsetSym,
// s.At(i, j) equals a.At(set[i], set[j]). Note that the supplied set does not
// have to be a strict subset, dimension repeats are allowed.
func (s *SymDense) SubsetSym(a Symmetric, set []int) {
n := len(set)
na := a.Symmetric()
s.reuseAs(n)
var restore func()
if a == s {
s, restore = s.isolatedWorkspace(a)
defer restore()
}
if a, ok := a.(RawSymmetricer); ok {
raw := a.RawSymmetric()
if s != a {
s.checkOverlap(raw)
}
for i := 0; i < n; i++ {
ssub := s.mat.Data[i*s.mat.Stride : i*s.mat.Stride+n]
r := set[i]
rsub := raw.Data[r*raw.Stride : r*raw.Stride+na]
for j := i; j < n; j++ {
c := set[j]
if r <= c {
ssub[j] = rsub[c]
} else {
ssub[j] = raw.Data[c*raw.Stride+r]
}
}
}
return
}
for i := 0; i < n; i++ {
for j := i; j < n; j++ {
s.mat.Data[i*s.mat.Stride+j] = a.At(set[i], set[j])
}
}
}
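// Illustrative usage sketch added for clarity; it is not part of the original
// source and assumes the package-level NewSymDense constructor. It extracts
// rows and columns {0, 2}, so sub.At(i, j) equals a.At(set[i], set[j]).
func exampleSubsetSym() *SymDense {
	a := NewSymDense(3, []float64{
		1, 2, 3,
		2, 4, 5,
		3, 5, 6,
	})
	var sub SymDense
	sub.SubsetSym(a, []int{0, 2})
	// sub is now the 2×2 symmetric matrix [[1, 3], [3, 6]].
	return &sub
}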
// ViewSquare returns a view of the submatrix starting at {i, i} and extending
// for n rows and columns. ViewSquare panics if the view is outside the bounds
// of the receiver.
//
// ViewSquare is deprecated and should not be used. It will be removed at a later date.
func (s *SymDense) ViewSquare(i, n int) Matrix {
return s.SliceSquare(i, i+n)
}
// SliceSquare returns a new Matrix that shares backing data with the receiver.<|fim▁hole|>// and columns. The final row and column in the resulting matrix is k-1.
// SliceSquare panics with ErrIndexOutOfRange if the slice is outside the bounds
// of the receiver.
func (s *SymDense) SliceSquare(i, k int) Matrix {
sz := s.Symmetric()
if i < 0 || sz < i || k < i || sz < k {
panic(matrix.ErrIndexOutOfRange)
}
v := *s
v.mat.Data = s.mat.Data[i*s.mat.Stride+i : (k-1)*s.mat.Stride+k]
v.mat.N = k - i
v.cap = s.cap - i
return &v
}
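// Illustrative usage sketch added for clarity; it is not part of the original
// source and assumes the package-level NewSymDense constructor. Slicing rows
// and columns [1, 3) of a 3×3 matrix yields a 2×2 view sharing backing data.
func exampleSliceSquare() Matrix {
	a := NewSymDense(3, []float64{
		1, 2, 3,
		2, 4, 5,
		3, 5, 6,
	})
	// v is the 2×2 view [[4, 5], [5, 6]]; writes through v also modify a.
	v := a.SliceSquare(1, 3)
	return v
}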
// GrowSquare returns the receiver expanded by n rows and n columns. If the
// dimensions of the expanded matrix are outside the capacity of the receiver
// a new allocation is made, otherwise not. Note that the receiver itself is
// not modified during the call to GrowSquare.
func (s *SymDense) GrowSquare(n int) Matrix {
if n < 0 {
panic(matrix.ErrIndexOutOfRange)
}
if n == 0 {
return s
}
var v SymDense
n += s.mat.N
if n > s.cap {
v.mat = blas64.Symmetric{
N: n,
Stride: n,
Uplo: blas.Upper,
Data: make([]float64, n*n),
}
v.cap = n
// Copy elements, including those not currently visible. Use a temporary
// structure to avoid modifying the receiver.
var tmp SymDense
tmp.mat = blas64.Symmetric{
N: s.cap,
Stride: s.mat.Stride,
Data: s.mat.Data,
Uplo: s.mat.Uplo,
}
tmp.cap = s.cap
v.CopySym(&tmp)
return &v
}
v.mat = blas64.Symmetric{
N: n,
Stride: s.mat.Stride,
Uplo: blas.Upper,
Data: s.mat.Data[:(n-1)*s.mat.Stride+n],
}
v.cap = s.cap
return &v
}
// PowPSD computes a^pow where a is a positive symmetric definite matrix.
//
// PowPSD returns an error if the matrix is not positive symmetric definite
// or the Eigendecomposition is not successful.
func (s *SymDense) PowPSD(a Symmetric, pow float64) error {
dim := a.Symmetric()
s.reuseAs(dim)
var eigen EigenSym
ok := eigen.Factorize(a, true)
if !ok {
return matrix.ErrFailedEigen
}
values := eigen.Values(nil)
for i, v := range values {
if v <= 0 {
return matrix.ErrNotPSD
}
values[i] = math.Pow(v, pow)
}
var u Dense
u.EigenvectorsSym(&eigen)
s.SymOuterK(values[0], u.ColView(0))
for i := 1; i < dim; i++ {
s.SymRankOne(s, values[i], u.ColView(i))
}
return nil
}<|fim▁end|> | // The returned matrix starts at {i,i} of the recevier and extends k-i rows |
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from django.db import models
class Category(models.Model):
name = models.CharField(max_length=30)<|fim▁hole|> active = models.BooleanField(default=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def __unicode__(self):
return self.name
def __str__(self):
return self.name<|fim▁end|> | |
<|file_name|>pos_session_opening.py<|end_file_name|><|fim▁begin|>from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.addons.point_of_sale.point_of_sale import pos_session
class pos_session_opening(osv.osv_memory):
_name = 'pos.session.opening'
_columns = {
'pos_config_id' : fields.many2one('pos.config', string='Point of Sale', required=True),
'pos_session_id' : fields.many2one('pos.session', string='PoS Session'),
'pos_state' : fields.related('pos_session_id', 'state',
type='selection',
selection=pos_session.POS_SESSION_STATE,
string='Session Status', readonly=True),
'pos_state_str' : fields.char('Status', readonly=True),
'show_config' : fields.boolean('Show Config', readonly=True),
'pos_session_name' : fields.related('pos_session_id', 'name', string="Session Name",
type='char', size=64, readonly=True),
'pos_session_username' : fields.related('pos_session_id', 'user_id', 'name',
type='char', size=64, readonly=True)
}
def open_ui(self, cr, uid, ids, context=None):
data = self.browse(cr, uid, ids[0], context=context)
context = dict(context or {})
context['active_id'] = data.pos_session_id.id
return {
'type' : 'ir.actions.act_url',
'url': '/pos/web/',
'target': 'self',
}
def open_existing_session_cb_close(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context=context)
wizard.pos_session_id.signal_workflow('cashbox_control')
return self.open_session_cb(cr, uid, ids, context)
def open_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1, "you can open only one session at a time"
proxy = self.pool.get('pos.session')
wizard = self.browse(cr, uid, ids[0], context=context)
if not wizard.pos_session_id:
values = {
'user_id' : uid,
'config_id' : wizard.pos_config_id.id,
}
session_id = proxy.create(cr, uid, values, context=context)
s = proxy.browse(cr, uid, session_id, context=context)
if s.state=='opened':
return self.open_ui(cr, uid, ids, context=context)
return self._open_session(session_id)
return self._open_session(wizard.pos_session_id.id)
def open_existing_session_cb(self, cr, uid, ids, context=None):
assert len(ids) == 1
wizard = self.browse(cr, uid, ids[0], context=context)
return self._open_session(wizard.pos_session_id.id)
def _open_session(self, session_id):
return {
'name': _('Session'),
'view_type': 'form',
'view_mode': 'form,tree',
'res_model': 'pos.session',
'res_id': session_id,
'view_id': False,
'type': 'ir.actions.act_window',
}
def on_change_config(self, cr, uid, ids, config_id, context=None):
result = {
'pos_session_id': False,
'pos_state': False,
'pos_state_str' : '',
'pos_session_username' : False,
'pos_session_name' : False,
}
if not config_id:
return {'value' : result}
proxy = self.pool.get('pos.session')
session_ids = proxy.search(cr, uid, [
('state', '!=', 'closed'),
('config_id', '=', config_id),
('user_id', '=', uid),
], context=context)
if session_ids:
session = proxy.browse(cr, uid, session_ids[0], context=context)
result['pos_state'] = str(session.state)
result['pos_state_str'] = dict(pos_session.POS_SESSION_STATE).get(session.state, '')
result['pos_session_id'] = session.id<|fim▁hole|> result['pos_session_name'] = session.name
result['pos_session_username'] = session.user_id.name
return {'value' : result}
def default_get(self, cr, uid, fieldnames, context=None):
so = self.pool.get('pos.session')
session_ids = so.search(cr, uid, [('state','<>','closed'), ('user_id','=',uid)], context=context)
if session_ids:
result = so.browse(cr, uid, session_ids[0], context=context).config_id.id
else:
current_user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
result = current_user.pos_config and current_user.pos_config.id or False
if not result:
r = self.pool.get('pos.config').search(cr, uid, [], context=context)
result = r and r[0] or False
count = self.pool.get('pos.config').search_count(cr, uid, [('state', '=', 'active')], context=context)
show_config = bool(count > 1)
return {
'pos_config_id' : result,
'show_config' : show_config,
}<|fim▁end|> | |
<|file_name|>ImageLoaderProvider.java<|end_file_name|><|fim▁begin|>package com.mypodcasts.support.injection;
import com.android.volley.RequestQueue;
import com.android.volley.toolbox.ImageLoader;
import javax.inject.Inject;
import javax.inject.Provider;
public class ImageLoaderProvider implements Provider<ImageLoader> {
@Inject
private ImageLoader.ImageCache imageCache;
<|fim▁hole|> private RequestQueue requestQueue;
@Override
public ImageLoader get() {
ImageLoader imageLoader = new ImageLoader(requestQueue, imageCache);
return imageLoader;
}
}<|fim▁end|> | @Inject |
<|file_name|>util.go<|end_file_name|><|fim▁begin|>/**
* Copyright 2013 @ S1N1 Team.
* name :
* author : jarryliu
* date : 2014-02-03 23:18
* description :
* history :
*/
package ols
import (
"bytes"
"fmt"
"github.com/atnet/gof"
"go2o/src/core/domain/interface/enum"
"go2o/src/core/service/dps"
"html/template"
"net/http"
"runtime/debug"<|fim▁hole|>// 处理自定义错误
func handleCustomError(w http.ResponseWriter, ctx gof.App, err error) {
if err != nil {
ctx.Template().Execute(w, gof.TemplateDataMap{
"error": err.Error(),
"statck": template.HTML(strings.Replace(string(debug.Stack()), "\n", "<br />", -1)),
},
"views/shop/ols/error.html")
}
}
func GetShops(c gof.App, partnerId int) []byte {
//分店
var buf *bytes.Buffer = bytes.NewBufferString("")
shops := dps.PartnerService.GetShopsOfPartner(partnerId)
if len(shops) == 0 {
return []byte("<div class=\"nodata noshop\">还未添加分店</div>")
}
buf.WriteString("<ul class=\"shops\">")
for i, v := range shops {
buf.WriteString(fmt.Sprintf(`<li class="s%d">
<div class="name"><span><strong>%s</strong></div>
<span class="shop-state shopstate%d">%s</span>
<div class="phone">%s</div>
<div class="address">%s</div>
</li>`, i+1, v.Name, v.State, enum.GetFrontShopStateName(v.State), v.Phone, v.Address))
}
buf.WriteString("</ul>")
return buf.Bytes()
}
func GetCategories(c gof.App, partnerId int, secret string) []byte {
var buf *bytes.Buffer = bytes.NewBufferString("")
categories := dps.SaleService.GetCategories(partnerId)
buf.WriteString(`<ul class="categories">
<li class="s0 current" val="0">
<div class="name"><span><strong>全部</strong></div>
</li>
`)
for i, v := range categories {
buf.WriteString(fmt.Sprintf(`<li class="s%d" val="%d">
<div class="name"><span><strong>%s</strong></div>
</li>`, i+1, v.Id, v.Name))
}
buf.WriteString("</ul>")
return buf.Bytes()
}<|fim▁end|> | "strings"
)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use parser::span::parse_spans;
use parser::Block;
use parser::Block::Paragraph;
use parser::Span::{Break, Text};
mod atx_header;
mod blockquote;
mod code_block;
mod hr;
mod link_reference;
mod ordered_list;
mod setext_header;
mod unordered_list;
use self::atx_header::parse_atx_header;
use self::blockquote::parse_blockquote;
use self::code_block::parse_code_block;
use self::hr::parse_hr;
use self::link_reference::parse_link_reference;
use self::ordered_list::parse_ordered_list;
use self::setext_header::parse_setext_header;
use self::unordered_list::parse_unordered_list;
pub fn parse_blocks(md: &str) -> Vec<Block> {
let mut blocks = vec![];
let mut t = vec![];
let lines: Vec<&str> = md.lines().collect();
let mut i = 0;
while i < lines.len() {
match parse_block(&lines[i..lines.len()]) {
// if a block is found
Some((block, consumed_lines)) => {
// the current paragraph has ended,
// push it to our blocks
if !t.is_empty() {
blocks.push(Paragraph(t));
t = Vec::new();
}
blocks.push(block);
i += consumed_lines;
}
// no known element, let's make this a paragraph
None => {
// empty linebreak => new paragraph
if lines[i].is_empty() && !t.is_empty() {
blocks.push(Paragraph(t));
t = Vec::new();
}
let spans = parse_spans(lines[i]);
// add a newline between linebreaks
// except when we have a break element or nothing
match (t.last(), spans.first()) {
(Some(&Break), _) => {}
(_, None) => {}
(None, _) => {}
_ => t.push(Text("\n".to_owned())),
}
t.extend_from_slice(&spans);
i += 1;
}
}
}
if !t.is_empty() {
blocks.push(Paragraph(t));
}
blocks
}
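// Illustrative sketch added for clarity; it is not part of the original
// source. A blank line separates paragraphs, so this input parses into two
// Paragraph blocks.
#[allow(dead_code)]
fn example_parse_two_paragraphs() -> Vec<Block> {
    parse_blocks("first paragraph\n\nsecond paragraph")
}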
fn parse_block(lines: &[&str]) -> Option<(Block, usize)> {
pipe_opt!(
lines
=> parse_hr
=> parse_atx_header
=> parse_code_block
=> parse_blockquote
=> parse_unordered_list
=> parse_ordered_list
=> parse_link_reference
// Must not match before anything else. See: https://spec.commonmark.org/0.29/#setext-headings
=> parse_setext_header
)
}
#[cfg(test)]
mod test {
use super::parse_blocks;
use parser::Block::{Blockquote, CodeBlock, Header, Hr, Paragraph};
use parser::Span::Text;
#[test]
fn finds_atx_header() {
assert_eq!(
parse_blocks("### Test"),
vec![Header(vec![Text("Test".to_owned())], 3)]
);
}
#[test]
fn finds_setext_header() {
assert_eq!(
parse_blocks("Test\n-------"),
vec![Header(vec![Text("Test".to_owned())], 2)]
);
assert_eq!(
parse_blocks("Test\n======="),
vec![Header(vec![Text("Test".to_owned())], 1)]
);
}
#[test]
fn finds_hr() {
assert_eq!(parse_blocks("-------"), vec![Hr]);
assert_eq!(parse_blocks("======="), vec![Hr]);
}
#[test]
fn finds_code_block() {
assert_eq!(
parse_blocks(" this is code\n and this as well"),<|fim▁hole|> assert_eq!(
parse_blocks("```\nthis is code\nand this as well\n```"),
vec![CodeBlock(
Some(String::new()),
"this is code\nand this as well".to_owned()
)]
);
}
#[test]
fn finds_blockquotes() {
assert_eq!(
parse_blocks("> One Paragraph\n>\n> ## H2 \n>\n"),
vec![Blockquote(vec![
Paragraph(vec![Text("One Paragraph".to_owned())]),
Header(vec![Text("H2".to_owned())], 2)
])]
);
assert_eq!(
parse_blocks("> One Paragraph\n>\n> > Another blockquote\n>\n"),
vec![Blockquote(vec![
Paragraph(vec![Text("One Paragraph".to_owned())]),
Blockquote(vec![Paragraph(vec![Text("Another blockquote".to_owned())])])
])]
);
assert_eq!(
parse_blocks("> > One Paragraph\n> >\n> > Another blockquote\n>\n"),
vec![Blockquote(vec![Blockquote(vec![
Paragraph(vec![Text("One Paragraph".to_owned())]),
Paragraph(vec![Text("Another blockquote".to_owned())])
])])]
);
assert_eq!(
parse_blocks("> One Paragraph, just > text \n>\n"),
vec![Blockquote(vec![Paragraph(vec![Text(
"One Paragraph, just > text".to_owned()
)])])]
);
assert_eq!(
parse_blocks("> One Paragraph\n>\n> just > text \n>\n"),
vec![Blockquote(vec![
Paragraph(vec![Text("One Paragraph".to_owned())]),
Paragraph(vec![Text("just > text".to_owned())])
])]
);
}
}<|fim▁end|> | vec![CodeBlock(None, "this is code\nand this as well".to_owned())]
);
|
<|file_name|>include_bib.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# file include_bib.py
# This file is part of LyX, the document processor.
# Licence details can be found in the file COPYING.
# authors Richard Heck and [SchAirport]
# Full author contact details are available in file CREDITS
# This script is intended to include a BibTeX-generated biblography
# in a LaTeX file, as publishers often want. It can be run manually
# on an exported LaTeX file, though it needs to be compiled first,
# so the bbl file will exist.
#
# It should also be possible to create a LyX converter to run this
# automatically. To set it up, create a format "ltxbbl"; make sure to
# check it as a document format. Then create a LaTeX-->ltxbbl converter,
# with the command:
# python -tt $$s/scripts/include_bib.py $$i $$o
# and give it the flags:
# needaux,nice
# You'll then have it in the export menu.
#
# We do not activate this converter by default, because there are problems
# when one tries to use multiple bibliographies.
#
# Please report any problems on the devel list.
import sys, os
class secbib:
def __init__(self, start = -1, end = -1):
self.start = start
self.end = end
class BibError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return self.msg
<|fim▁hole|> texlist = open(fil, 'r').readlines()
# multiple bibliographies
biblist = []
stylist = []
for i, line in enumerate(texlist):
if "\\bibliographystyle" in line:
stylist.append(i)
elif "\\bibliography" in line:
biblist.append(i)
elif "\\begin{btSect}" in line:
raise BibError("Cannot export sectioned bibliographies")
if len(biblist) > 1:
raise BibError("Cannot export multiple bibliographies.")
if not biblist:
raise BibError("No biliography found!")
bibpos = biblist[0]
newlist = texlist[0:bibpos]
bblfile = fil[:-4] + ".bbl"
bbllist = open(bblfile, 'r').readlines()
newlist += bbllist
newlist += texlist[bibpos + 1:]
outfile = open(out, 'w')
outfile.write("".join(newlist))
outfile.close()
return out
def usage():
print r'''
Usage: python include_bib.py file.tex [outfile.tex]
Includes the contents of file.bbl, which must exist in the
same directory as file.tex, in place of the \bibliography
command, and creates the new file outfile.tex. If no name
for that file is given, we create: file-bbl.tex.
'''
if __name__ == "__main__":
args = len(sys.argv)
if args <= 1 or args > 3:
usage()
sys.exit(0)
    # we should probably make sure this is a tex file....
infile = sys.argv[1]
if infile[-4:] != ".tex":
print "Error: " + infile + " is not a TeX file"
usage()
sys.exit(1)
if args == 3:
outfile = sys.argv[2]
else:
outfile = infile[:-4] + "-bbl.tex"
newfile = InsertBib(infile, outfile)
print "Wrote " + outfile<|fim▁end|> |
def InsertBib(fil, out):
''' Inserts the contents of the .bbl file instead of the bibliography in a new .tex file '''
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public<|fim▁hole|># License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
PROJECT_NAME = 'releng-treestatus'
APP_NAME = 'releng_treestatus'<|fim▁end|> | |
<|file_name|>FindNextHigherNumberWithSameDigits.py<|end_file_name|><|fim▁begin|>'''
Given a number, find the next higher number using only the digits in the given number.
For example, if the given number is 1234, the next higher number with the same digits is 1243
'''
def FindNext(num):
number = str(num)
length = len(number)
for i in range(length-2,-1,-1):
current = number[i]
right = number[i+1]
if current < right:<|fim▁hole|> temp = ''.join(temp)
return int(number[:i]+Next+temp)
return num<|fim▁end|> | temp = sorted(number[i:])
Next = temp[temp.index(current)+1]
temp.remove(Next) |
<|file_name|>gh-subscribers-table.js<|end_file_name|><|fim▁begin|>define("ghost-admin/templates/components/gh-subscribers-table", ["exports"], function (exports) {
exports["default"] = Ember.HTMLBars.template((function () {
var child0 = (function () {
var child0 = (function () {
var child0 = (function () {
var child0 = (function () {
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 6,
"column": 12
},
"end": {
"line": 8,
"column": 12
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createTextNode(" Loading...\n");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes() {
return [];
},
statements: [],
locals: [],
templates: []
};
})();
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 5,
"column": 8
},
"end": {
"line": 9,
"column": 8
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(1);
morphs[0] = dom.createMorphAt(fragment, 0, 0, contextualElement);
dom.insertBoundary(fragment, 0);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["block", "body.loader", [], [], 0, null, ["loc", [null, [6, 12], [8, 28]]]]],
locals: [],
templates: [child0]
};
})();
var child1 = (function () {
var child0 = (function () {
var child0 = (function () {
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 11,
"column": 16
},
"end": {
"line": 13,
"column": 16
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createTextNode(" No subscribers found.\n");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes() {
return [];
},
statements: [],
locals: [],
templates: []
};
})();
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {<|fim▁hole|> "line": 10,
"column": 12
},
"end": {
"line": 14,
"column": 12
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(1);
morphs[0] = dom.createMorphAt(fragment, 0, 0, contextualElement);
dom.insertBoundary(fragment, 0);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["block", "body.no-data", [], [], 0, null, ["loc", [null, [11, 16], [13, 33]]]]],
locals: [],
templates: [child0]
};
})();
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 9,
"column": 8
},
"end": {
"line": 15,
"column": 8
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(1);
morphs[0] = dom.createMorphAt(fragment, 0, 0, contextualElement);
dom.insertBoundary(fragment, 0);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["block", "if", [["get", "table.isEmpty", ["loc", [null, [10, 18], [10, 31]]]]], [], 0, null, ["loc", [null, [10, 12], [14, 19]]]]],
locals: [],
templates: [child0]
};
})();
return {
meta: {
"fragmentReason": false,
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 4,
"column": 4
},
"end": {
"line": 16,
"column": 4
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 1,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(1);
morphs[0] = dom.createMorphAt(fragment, 0, 0, contextualElement);
dom.insertBoundary(fragment, 0);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["block", "if", [["get", "isLoading", ["loc", [null, [5, 14], [5, 23]]]]], [], 0, 1, ["loc", [null, [5, 8], [15, 15]]]]],
locals: ["body"],
templates: [child0, child1]
};
})();
return {
meta: {
"fragmentReason": {
"name": "missing-wrapper",
"problems": ["wrong-type", "multiple-nodes"]
},
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 1,
"column": 0
},
"end": {
"line": 17,
"column": 0
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 1,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createTextNode(" ");
dom.appendChild(el0, el1);
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
var el1 = dom.createTextNode("\n\n");
dom.appendChild(el0, el1);
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(2);
morphs[0] = dom.createMorphAt(fragment, 1, 1, contextualElement);
morphs[1] = dom.createMorphAt(fragment, 3, 3, contextualElement);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["inline", "t.head", [], ["onColumnClick", ["subexpr", "action", [["get", "sortByColumn", ["loc", [null, [2, 35], [2, 47]]]]], [], ["loc", [null, [2, 27], [2, 48]]]], "iconAscending", "icon-ascending", "iconDescending", "icon-descending"], ["loc", [null, [2, 4], [2, 114]]]], ["block", "t.body", [], ["canSelect", false, "tableActions", ["subexpr", "hash", [], ["delete", ["subexpr", "action", [["get", "delete", ["loc", [null, [4, 64], [4, 70]]]]], [], ["loc", [null, [4, 56], [4, 71]]]]], ["loc", [null, [4, 43], [4, 72]]]]], 0, null, ["loc", [null, [4, 4], [16, 15]]]]],
locals: ["t"],
templates: [child0]
};
})();
return {
meta: {
"fragmentReason": {
"name": "missing-wrapper",
"problems": ["wrong-type"]
},
"revision": "[email protected]",
"loc": {
"source": null,
"start": {
"line": 1,
"column": 0
},
"end": {
"line": 18,
"column": 0
}
},
"moduleName": "ghost-admin/templates/components/gh-subscribers-table.hbs"
},
isEmpty: false,
arity: 0,
cachedFragment: null,
hasRendered: false,
buildFragment: function buildFragment(dom) {
var el0 = dom.createDocumentFragment();
var el1 = dom.createComment("");
dom.appendChild(el0, el1);
return el0;
},
buildRenderNodes: function buildRenderNodes(dom, fragment, contextualElement) {
var morphs = new Array(1);
morphs[0] = dom.createMorphAt(fragment, 0, 0, contextualElement);
dom.insertBoundary(fragment, 0);
dom.insertBoundary(fragment, null);
return morphs;
},
statements: [["block", "gh-light-table", [["get", "table", ["loc", [null, [1, 18], [1, 23]]]]], ["scrollContainer", ".subscribers-table", "scrollBuffer", 100, "onScrolledToBottom", ["subexpr", "action", ["onScrolledToBottom"], [], ["loc", [null, [1, 97], [1, 126]]]]], 0, null, ["loc", [null, [1, 0], [17, 19]]]]],
locals: [],
templates: [child0]
};
})());
});<|fim▁end|> | "source": null,
"start": { |
<|file_name|>OauthController.java<|end_file_name|><|fim▁begin|>package lab.springboot.authorization.controller;
import lab.springboot.authorization.config.SecurityProperties;
import lombok.NonNull;
import lombok.RequiredArgsConstructor;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;<|fim▁hole|>@RequestMapping("/oauth")
@RequiredArgsConstructor
public class OauthController {
private final @NonNull SecurityProperties securityProperties;
@GetMapping("login")
public String loginView(Model model) {
model.addAttribute("action", securityProperties.getLoginProcessingUrl());
return "form-login";
}
}<|fim▁end|> |
@Controller |
<|file_name|>physicsImpostor.ts<|end_file_name|><|fim▁begin|>import { Nullable, IndicesArray } from "../types";
import { Logger } from "../Misc/logger";
import { ArrayTools } from "../Misc/arrayTools";
import { Vector3, Matrix, Quaternion } from "../Maths/math.vector";
import { TransformNode } from "../Meshes/transformNode";
import { AbstractMesh } from "../Meshes/abstractMesh";
import { Mesh } from "../Meshes/mesh";
import { Scene } from "../scene";
import { Bone } from "../Bones/bone";
import { BoundingInfo } from "../Culling/boundingInfo";
import { IPhysicsEngine } from "./IPhysicsEngine";
import { PhysicsJoint, PhysicsJointData } from "./physicsJoint";
import { Space } from "../Maths/math.axis";
/**
* The interface for the physics imposter parameters
* @see https://doc.babylonjs.com/how_to/using_the_physics_engine
*/
export interface PhysicsImpostorParameters {
/**
* The mass of the physics imposter
*/
mass: number;
/**
* The friction of the physics imposter
*/
friction?: number;
/**
* The coefficient of restitution of the physics imposter
*/
restitution?: number;
/**
* The native options of the physics imposter
*/
nativeOptions?: any;
/**
* Specifies if the parent should be ignored
*/
ignoreParent?: boolean;
/**
* Specifies if bi-directional transformations should be disabled
*/
disableBidirectionalTransformation?: boolean;
/**
* The pressure inside the physics imposter, soft object only
*/
pressure?: number;
/**
* The stiffness the physics imposter, soft object only
*/
stiffness?: number;
/**
* The number of iterations used in maintaining consistent vertex velocities, soft object only
*/
velocityIterations?: number;
/**
* The number of iterations used in maintaining consistent vertex positions, soft object only
*/
positionIterations?: number;
/**
* The number used to fix points on a cloth (0, 1, 2, 4, 8) or rope (0, 1, 2) only
* 0 None, 1, back left or top, 2, back right or bottom, 4, front left, 8, front right
* Add to fix multiple points
*/
fixedPoints?: number;
/**
* The collision margin around a soft object
*/
margin?: number;
/**
* The collision margin around a soft object
*/
damping?: number;
/**
* The path for a rope based on an extrusion
*/
path?: any;
/**
* The shape of an extrusion used for a rope based on an extrusion
*/
shape?: any;
}
/**
* Interface for a physics-enabled object
* @see https://doc.babylonjs.com/how_to/using_the_physics_engine
*/
export interface IPhysicsEnabledObject {
/**
* The position of the physics-enabled object
*/
position: Vector3;
/**
* The rotation of the physics-enabled object
*/
rotationQuaternion: Nullable<Quaternion>;
/**
* The scale of the physics-enabled object
*/
scaling: Vector3;
/**
* The rotation of the physics-enabled object
*/
rotation?: Vector3;
/**
* The parent of the physics-enabled object
*/
parent?: any;
/**
* The bounding info of the physics-enabled object
* @returns The bounding info of the physics-enabled object
*/
getBoundingInfo(): BoundingInfo;
/**
* Computes the world matrix
* @param force Specifies if the world matrix should be computed by force
* @returns A world matrix
*/
computeWorldMatrix(force: boolean): Matrix;
/**
* Gets the world matrix
* @returns A world matrix
*/
getWorldMatrix?(): Matrix;
/**
* Gets the child meshes
* @param directDescendantsOnly Specifies if only direct-descendants should be obtained
* @returns An array of abstract meshes
*/
getChildMeshes?(directDescendantsOnly?: boolean): Array<AbstractMesh>;
/**
* Gets the vertex data
* @param kind The type of vertex data
* @returns A nullable array of numbers, or a float32 array
*/
getVerticesData(kind: string): Nullable<Array<number> | Float32Array>;
/**
* Gets the indices from the mesh
* @returns A nullable array of index arrays
*/
getIndices?(): Nullable<IndicesArray>;
/**
* Gets the scene from the mesh
* @returns the indices array or null
*/
getScene?(): Scene;
/**
* Gets the absolute position from the mesh
* @returns the absolute position
*/
getAbsolutePosition(): Vector3;
/**
* Gets the absolute pivot point from the mesh
* @returns the absolute pivot point
*/
getAbsolutePivotPoint(): Vector3;
/**
* Rotates the mesh
* @param axis The axis of rotation
* @param amount The amount of rotation
* @param space The space of the rotation
* @returns The rotation transform node
*/
rotate(axis: Vector3, amount: number, space?: Space): TransformNode;
/**
* Translates the mesh
* @param axis The axis of translation
* @param distance The distance of translation
* @param space The space of the translation
* @returns The transform node
*/
translate(axis: Vector3, distance: number, space?: Space): TransformNode;
/**
* Sets the absolute position of the mesh
* @param absolutePosition The absolute position of the mesh
* @returns The transform node
*/
setAbsolutePosition(absolutePosition: Vector3): TransformNode;
/**
* Gets the class name of the mesh
* @returns The class name
*/
getClassName(): string;
}
Mesh._PhysicsImpostorParser = function (scene: Scene, physicObject: IPhysicsEnabledObject, jsonObject: any): PhysicsImpostor {
return new PhysicsImpostor(
physicObject,
jsonObject.physicsImpostor,
{
mass: jsonObject.physicsMass,
friction: jsonObject.physicsFriction,
restitution: jsonObject.physicsRestitution,
},
scene
);
};
/**
* Represents a physics imposter
* @see https://doc.babylonjs.com/how_to/using_the_physics_engine
*/
export class PhysicsImpostor {
/**
* The default object size of the imposter
*/
public static DEFAULT_OBJECT_SIZE: Vector3 = new Vector3(1, 1, 1);
/**
* The identity quaternion of the imposter
*/
public static IDENTITY_QUATERNION = Quaternion.Identity();
/** @hidden */
public _pluginData: any = {};
private _physicsEngine: Nullable<IPhysicsEngine>;
//The native cannon/oimo/energy physics body object.
private _physicsBody: any;
private _bodyUpdateRequired: boolean = false;
private _onBeforePhysicsStepCallbacks = new Array<(impostor: PhysicsImpostor) => void>();
private _onAfterPhysicsStepCallbacks = new Array<(impostor: PhysicsImpostor) => void>();
/** @hidden */
public _onPhysicsCollideCallbacks: Array<{ callback: (collider: PhysicsImpostor, collidedAgainst: PhysicsImpostor, point: Nullable<Vector3>) => void; otherImpostors: Array<PhysicsImpostor> }> = [];
private _deltaPosition: Vector3 = Vector3.Zero();
private _deltaRotation: Quaternion;
private _deltaRotationConjugated: Quaternion;
/** @hidden */
public _isFromLine: boolean;
//If set, this is this impostor's parent
private _parent: Nullable<PhysicsImpostor>;
private _isDisposed = false;
private static _tmpVecs: Vector3[] = ArrayTools.BuildArray(3, Vector3.Zero);
private static _tmpQuat: Quaternion = Quaternion.Identity();
/**
* Specifies if the physics imposter is disposed
*/
get isDisposed(): boolean {
return this._isDisposed;
}
/**
* Gets the mass of the physics imposter
*/
get mass(): number {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyMass(this) : 0;
}
set mass(value: number) {
this.setMass(value);
}
/**
* Gets the coefficient of friction
*/
get friction(): number {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyFriction(this) : 0;
}
/**
* Sets the coefficient of friction
*/
set friction(value: number) {
if (!this._physicsEngine) {
return;
}
this._physicsEngine.getPhysicsPlugin().setBodyFriction(this, value);
}
/**
* Gets the coefficient of restitution
*/
get restitution(): number {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getBodyRestitution(this) : 0;
}
/**
* Sets the coefficient of restitution
*/
set restitution(value: number) {
if (!this._physicsEngine) {
return;
}
this._physicsEngine.getPhysicsPlugin().setBodyRestitution(this, value);
}
/**
* Gets the pressure of a soft body; only supported by the AmmoJSPlugin
*/
get pressure(): number {
if (!this._physicsEngine) {
return 0;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.setBodyPressure) {
return 0;
}
return plugin.getBodyPressure!(this);
}
/**
* Sets the pressure of a soft body; only supported by the AmmoJSPlugin
*/
set pressure(value: number) {
if (!this._physicsEngine) {
return;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.setBodyPressure) {
return;
}
plugin.setBodyPressure!(this, value);
}
/**
* Gets the stiffness of a soft body; only supported by the AmmoJSPlugin
*/
get stiffness(): number {
if (!this._physicsEngine) {
return 0;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.getBodyStiffness) {
return 0;
}
return plugin.getBodyStiffness!(this);
}
/**
* Sets the stiffness of a soft body; only supported by the AmmoJSPlugin
*/
set stiffness(value: number) {
if (!this._physicsEngine) {
return;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.setBodyStiffness) {
return;
}
plugin.setBodyStiffness!(this, value);
}
/**
* Gets the velocityIterations of a soft body; only supported by the AmmoJSPlugin
*/
get velocityIterations(): number {
if (!this._physicsEngine) {
return 0;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.getBodyVelocityIterations) {
return 0;
}
return plugin.getBodyVelocityIterations!(this);
}
/**
* Sets the velocityIterations of a soft body; only supported by the AmmoJSPlugin
*/
set velocityIterations(value: number) {
if (!this._physicsEngine) {
return;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.setBodyVelocityIterations) {
return;
}
plugin.setBodyVelocityIterations!(this, value);
}
/**
* Gets the positionIterations of a soft body; only supported by the AmmoJSPlugin
*/
get positionIterations(): number {
if (!this._physicsEngine) {
return 0;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.getBodyPositionIterations) {
return 0;
}
return plugin.getBodyPositionIterations!(this);
}
/**
* Sets the positionIterations of a soft body; only supported by the AmmoJSPlugin
*/
set positionIterations(value: number) {
if (!this._physicsEngine) {
return;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.setBodyPositionIterations) {
return;
}
plugin.setBodyPositionIterations!(this, value);
}
/**
* The unique id of the physics imposter
* set by the physics engine when adding this impostor to the array
*/
public uniqueId: number;
/**
* @hidden
*/
public soft: boolean = false;
/**
* @hidden
*/
public segments: number = 0;
private _joints: Array<{
joint: PhysicsJoint;
otherImpostor: PhysicsImpostor;
}>;
/**
* Initializes the physics imposter
* @param object The physics-enabled object used as the physics imposter
* @param type The type of the physics imposter
* @param _options The options for the physics imposter
* @param _scene The Babylon scene
*/
constructor(
/**
* The physics-enabled object used as the physics imposter
*/
public object: IPhysicsEnabledObject,
/**
* The type of the physics imposter
*/
public type: number,
private _options: PhysicsImpostorParameters = { mass: 0 },
private _scene?: Scene
) {
//sanity check!
if (!this.object) {
Logger.Error("No object was provided. A physics object is obligatory");
return;
}
if (this.object.parent && _options.mass !== 0) {
Logger.Warn("A physics impostor has been created for an object which has a parent. Babylon physics currently works in local space so unexpected issues may occur.");
}
// Legacy support for old syntax.
if (!this._scene && object.getScene) {
this._scene = object.getScene();
}
if (!this._scene) {
return;
}
if (this.type > 100) {
this.soft = true;
}
this._physicsEngine = this._scene.getPhysicsEngine();
if (!this._physicsEngine) {
Logger.Error("Physics not enabled. Please use scene.enablePhysics(...) before creating impostors.");
} else {
//set the object's quaternion, if not set
if (!this.object.rotationQuaternion) {
if (this.object.rotation) {
this.object.rotationQuaternion = Quaternion.RotationYawPitchRoll(this.object.rotation.y, this.object.rotation.x, this.object.rotation.z);
} else {
this.object.rotationQuaternion = new Quaternion();
}
}
//default options params
this._options.mass = _options.mass === void 0 ? 0 : _options.mass;
this._options.friction = _options.friction === void 0 ? 0.2 : _options.friction;
this._options.restitution = _options.restitution === void 0 ? 0.2 : _options.restitution;
if (this.soft) {
//softbody mass must be above 0;
this._options.mass = this._options.mass > 0 ? this._options.mass : 1;
this._options.pressure = _options.pressure === void 0 ? 200 : _options.pressure;
this._options.stiffness = _options.stiffness === void 0 ? 1 : _options.stiffness;
this._options.velocityIterations = _options.velocityIterations === void 0 ? 20 : _options.velocityIterations;
this._options.positionIterations = _options.positionIterations === void 0 ? 20 : _options.positionIterations;
this._options.fixedPoints = _options.fixedPoints === void 0 ? 0 : _options.fixedPoints;
this._options.margin = _options.margin === void 0 ? 0 : _options.margin;
this._options.damping = _options.damping === void 0 ? 0 : _options.damping;
this._options.path = _options.path === void 0 ? null : _options.path;
this._options.shape = _options.shape === void 0 ? null : _options.shape;
}
this._joints = [];
//If the mesh has a parent, don't initialize the physicsBody. Instead wait for the parent to do that.
if (!this.object.parent || this._options.ignoreParent) {
this._init();
} else if (this.object.parent.physicsImpostor) {
Logger.Warn("You must affect impostors to children before affecting impostor to parent.");
}
}
}
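    // Illustrative usage sketch (not part of the original source): creating an
    // impostor for an existing mesh once scene.enablePhysics(...) has been
    // called. `sphereMesh` and `scene` are assumed to exist elsewhere, and
    // PhysicsImpostor.SphereImpostor is assumed to be one of the impostor type
    // constants this class exposes.
    //
    //     sphereMesh.physicsImpostor = new PhysicsImpostor(
    //         sphereMesh,
    //         PhysicsImpostor.SphereImpostor,
    //         { mass: 1, friction: 0.5, restitution: 0.7 },
    //         scene
    //     );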
/**
     * This function will completely initialize this impostor.
* It will create a new body - but only if this mesh has no parent.
* If it has, this impostor will not be used other than to define the impostor
* of the child mesh.
* @hidden
*/
public _init() {
if (!this._physicsEngine) {
return;
}
this._physicsEngine.removeImpostor(this);
this.physicsBody = null;
this._parent = this._parent || this._getPhysicsParent();
if (!this._isDisposed && (!this.parent || this._options.ignoreParent)) {
this._physicsEngine.addImpostor(this);
}
}
private _getPhysicsParent(): Nullable<PhysicsImpostor> {
if (this.object.parent instanceof AbstractMesh) {
var parentMesh: AbstractMesh = <AbstractMesh>this.object.parent;
return parentMesh.physicsImpostor;
}
return null;
}
/**
* Should a new body be generated.
* @returns boolean specifying if body initialization is required
*/
public isBodyInitRequired(): boolean {
return this._bodyUpdateRequired || (!this._physicsBody && !this._parent);
}
/**
     * Marks the impostor's scaling as updated and forces the physics body to be regenerated.
*/
public setScalingUpdated() {
this.forceUpdate();
}
/**
* Force a regeneration of this or the parent's impostor's body.
     * Use with caution - this will remove all joints already implemented.
*/
public forceUpdate() {
this._init();
if (this.parent && !this._options.ignoreParent) {
this.parent.forceUpdate();
}
}
/*public get mesh(): AbstractMesh {
return this._mesh;
}*/
/**
* Gets the body that holds this impostor. Either its own, or its parent.
*/
public get physicsBody(): any {
return this._parent && !this._options.ignoreParent ? this._parent.physicsBody : this._physicsBody;
}
/**
* Get the parent of the physics imposter
* @returns Physics imposter or null
*/
public get parent(): Nullable<PhysicsImpostor> {
return !this._options.ignoreParent && this._parent ? this._parent : null;
}
/**
* Sets the parent of the physics imposter
*/
public set parent(value: Nullable<PhysicsImpostor>) {
this._parent = value;
}
/**
* Set the physics body. Used mainly by the physics engine/plugin
*/
public set physicsBody(physicsBody: any) {
if (this._physicsBody && this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().removePhysicsBody(this);
}
this._physicsBody = physicsBody;
this.resetUpdateFlags();
}
/**
* Resets the update flags
*/
public resetUpdateFlags() {
this._bodyUpdateRequired = false;
}
/**
* Gets the object extend size
* @returns the object extend size
*/
public getObjectExtendSize(): Vector3 {
if (this.object.getBoundingInfo) {
let q = this.object.rotationQuaternion;
const scaling = this.object.scaling.clone();
//reset rotation
this.object.rotationQuaternion = PhysicsImpostor.IDENTITY_QUATERNION;
//calculate the world matrix with no rotation
const worldMatrix = this.object.computeWorldMatrix && this.object.computeWorldMatrix(true);
if (worldMatrix) {
worldMatrix.decompose(scaling, undefined, undefined);
}
const boundingInfo = this.object.getBoundingInfo();
// get the global scaling of the object
const size = boundingInfo.boundingBox.extendSize.scale(2).multiplyInPlace(scaling);
//bring back the rotation
this.object.rotationQuaternion = q;
//calculate the world matrix with the new rotation
this.object.computeWorldMatrix && this.object.computeWorldMatrix(true);
return size;
} else {
return PhysicsImpostor.DEFAULT_OBJECT_SIZE;
}
}
/**
* Gets the object center
* @returns The object center
*/
public getObjectCenter(): Vector3 {
if (this.object.getBoundingInfo) {
let boundingInfo = this.object.getBoundingInfo();
return boundingInfo.boundingBox.centerWorld;
} else {
return this.object.position;
}
}
/**
* Get a specific parameter from the options parameters
* @param paramName The object parameter name
* @returns The object parameter
*/
public getParam(paramName: string): any {
return (<any>this._options)[paramName];
}
/**
* Sets a specific parameter in the options given to the physics plugin
* @param paramName The parameter name
* @param value The value of the parameter
*/
public setParam(paramName: string, value: number) {
(<any>this._options)[paramName] = value;
this._bodyUpdateRequired = true;
}
/**
* Specifically change the body's mass option. Won't recreate the physics body object
* @param mass The mass of the physics imposter
*/
public setMass(mass: number) {
if (this.getParam("mass") !== mass) {
this.setParam("mass", mass);
}
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().setBodyMass(this, mass);
}
}
/**
* Gets the linear velocity
* @returns linear velocity or null
*/
public getLinearVelocity(): Nullable<Vector3> {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getLinearVelocity(this) : Vector3.Zero();
}
/**
* Sets the linear velocity
* @param velocity linear velocity or null
*/
public setLinearVelocity(velocity: Nullable<Vector3>) {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().setLinearVelocity(this, velocity);
}
}
/**
* Gets the angular velocity
* @returns angular velocity or null
*/
public getAngularVelocity(): Nullable<Vector3> {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getAngularVelocity(this) : Vector3.Zero();
}
/**
* Sets the angular velocity
* @param velocity The velocity or null
*/
public setAngularVelocity(velocity: Nullable<Vector3>) {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().setAngularVelocity(this, velocity);
}
}
/**
* Execute a function with the physics plugin native code
* Provide a function the will have two variables - the world object and the physics body object
* @param func The function to execute with the physics plugin native code
*/
public executeNativeFunction(func: (world: any, physicsBody: any) => void) {
if (this._physicsEngine) {
func(this._physicsEngine.getPhysicsPlugin().world, this.physicsBody);
}
}
/**
* Register a function that will be executed before the physics world is stepping forward
* @param func The function to execute before the physics world is stepped forward
*/
public registerBeforePhysicsStep(func: (impostor: PhysicsImpostor) => void): void {
this._onBeforePhysicsStepCallbacks.push(func);
}
/**
* Unregister a function that will be executed before the physics world is stepping forward
* @param func The function to execute before the physics world is stepped forward
*/
public unregisterBeforePhysicsStep(func: (impostor: PhysicsImpostor) => void): void {
var index = this._onBeforePhysicsStepCallbacks.indexOf(func);
if (index > -1) {
this._onBeforePhysicsStepCallbacks.splice(index, 1);
} else {
Logger.Warn("Function to remove was not found");
}
}
/**
* Register a function that will be executed after the physics step
* @param func The function to execute after physics step
*/
public registerAfterPhysicsStep(func: (impostor: PhysicsImpostor) => void): void {
this._onAfterPhysicsStepCallbacks.push(func);
}
/**
* Unregisters a function that will be executed after the physics step
* @param func The function to execute after physics step
*/
public unregisterAfterPhysicsStep(func: (impostor: PhysicsImpostor) => void): void {
var index = this._onAfterPhysicsStepCallbacks.indexOf(func);
if (index > -1) {
this._onAfterPhysicsStepCallbacks.splice(index, 1);
} else {
Logger.Warn("Function to remove was not found");
}
}
/**
* register a function that will be executed when this impostor collides against a different body
* @param collideAgainst Physics imposter, or array of physics imposters to collide against
* @param func Callback that is executed on collision
*/
public registerOnPhysicsCollide(collideAgainst: PhysicsImpostor | Array<PhysicsImpostor>, func: (collider: PhysicsImpostor, collidedAgainst: PhysicsImpostor) => void): void {
var collidedAgainstList: Array<PhysicsImpostor> = collideAgainst instanceof Array ? <Array<PhysicsImpostor>>collideAgainst : [<PhysicsImpostor>collideAgainst];
this._onPhysicsCollideCallbacks.push({ callback: func, otherImpostors: collidedAgainstList });
}
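    // Illustrative usage sketch (not part of the original source): reacting to
    // contact between two impostors. `boxImpostor` and `groundImpostor` are
    // assumed to be PhysicsImpostor instances created elsewhere.
    //
    //     boxImpostor.registerOnPhysicsCollide(groundImpostor, (collider, collidedAgainst) => {
    //         console.log(`impostor ${collider.uniqueId} hit impostor ${collidedAgainst.uniqueId}`);
    //     });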
/**
* Unregisters the physics imposter on contact
* @param collideAgainst The physics object to collide against
* @param func Callback to execute on collision
*/
public unregisterOnPhysicsCollide(collideAgainst: PhysicsImpostor | Array<PhysicsImpostor>, func: (collider: PhysicsImpostor, collidedAgainst: PhysicsImpostor | Array<PhysicsImpostor>) => void): void {
var collidedAgainstList: Array<PhysicsImpostor> = collideAgainst instanceof Array ? <Array<PhysicsImpostor>>collideAgainst : [<PhysicsImpostor>collideAgainst];
var index = -1;
let found = this._onPhysicsCollideCallbacks.some((cbDef, idx) => {
if (cbDef.callback === func && cbDef.otherImpostors.length === collidedAgainstList.length) {
                // check that the arrays match
let sameList = cbDef.otherImpostors.every((impostor) => {
return collidedAgainstList.indexOf(impostor) > -1;
});
if (sameList) {
index = idx;
}
return sameList;
}
return false;
});
if (found) {
this._onPhysicsCollideCallbacks.splice(index, 1);
} else {
Logger.Warn("Function to remove was not found");
}
}
//temp variables for parent rotation calculations
//private _mats: Array<Matrix> = [new Matrix(), new Matrix()];
private _tmpQuat: Quaternion = new Quaternion();
private _tmpQuat2: Quaternion = new Quaternion();
/**
* Get the parent rotation
* @returns The parent rotation
*/
public getParentsRotation(): Quaternion {
let parent = this.object.parent;
this._tmpQuat.copyFromFloats(0, 0, 0, 1);
while (parent) {
if (parent.rotationQuaternion) {
this._tmpQuat2.copyFrom(parent.rotationQuaternion);
} else {
Quaternion.RotationYawPitchRollToRef(parent.rotation.y, parent.rotation.x, parent.rotation.z, this._tmpQuat2);
}
this._tmpQuat.multiplyToRef(this._tmpQuat2, this._tmpQuat);
parent = parent.parent;
}
return this._tmpQuat;
}
/**
* this function is executed by the physics engine.
*/
public beforeStep = () => {
if (!this._physicsEngine) {
return;
}
this.object.translate(this._deltaPosition, -1);
this._deltaRotationConjugated && this.object.rotationQuaternion && this.object.rotationQuaternion.multiplyToRef(this._deltaRotationConjugated, this.object.rotationQuaternion);
this.object.computeWorldMatrix(false);
if (this.object.parent && this.object.rotationQuaternion) {
this.getParentsRotation();
this._tmpQuat.multiplyToRef(this.object.rotationQuaternion, this._tmpQuat);
} else {
this._tmpQuat.copyFrom(this.object.rotationQuaternion || new Quaternion());
}
if (!this._options.disableBidirectionalTransformation) {
this.object.rotationQuaternion && this._physicsEngine.getPhysicsPlugin().setPhysicsBodyTransformation(this, /*bInfo.boundingBox.centerWorld*/ this.object.getAbsolutePosition(), this._tmpQuat);
}
this._onBeforePhysicsStepCallbacks.forEach((func) => {
func(this);
});
};
/**
* this function is executed by the physics engine
*/
public afterStep = () => {
if (!this._physicsEngine) {
return;
}
this._onAfterPhysicsStepCallbacks.forEach((func) => {
func(this);
});
this._physicsEngine.getPhysicsPlugin().setTransformationFromPhysicsBody(this);
// object has now its world rotation. needs to be converted to local.
if (this.object.parent && this.object.rotationQuaternion) {
this.getParentsRotation();
this._tmpQuat.conjugateInPlace();
this._tmpQuat.multiplyToRef(this.object.rotationQuaternion, this.object.rotationQuaternion);
}
// take the position set and make it the absolute position of this object.
this.object.setAbsolutePosition(this.object.position);
this._deltaRotation && this.object.rotationQuaternion && this.object.rotationQuaternion.multiplyToRef(this._deltaRotation, this.object.rotationQuaternion);
this.object.translate(this._deltaPosition, 1);
};
/**
* Legacy collision detection event support
*/
public onCollideEvent: Nullable<(collider: PhysicsImpostor, collidedWith: PhysicsImpostor) => void> = null;
/**
     * Collision handler; receives the event and body object due to cannon's event-based architecture.
*/
public onCollide = (e: { body: any; point: Nullable<Vector3> }) => {
if (!this._onPhysicsCollideCallbacks.length && !this.onCollideEvent) {
return;
}
if (!this._physicsEngine) {
return;
}
var otherImpostor = this._physicsEngine.getImpostorWithPhysicsBody(e.body);
if (otherImpostor) {
// Legacy collision detection event support
if (this.onCollideEvent) {
this.onCollideEvent(this, otherImpostor);
}
this._onPhysicsCollideCallbacks
.filter((obj) => {
return obj.otherImpostors.indexOf(<PhysicsImpostor>otherImpostor) !== -1;
})
.forEach((obj) => {
obj.callback(this, <PhysicsImpostor>otherImpostor, e.point);
});
<|fim▁hole|> };
/**
* Apply a force
* @param force The force to apply
* @param contactPoint The contact point for the force
* @returns The physics imposter
*/
public applyForce(force: Vector3, contactPoint: Vector3): PhysicsImpostor {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().applyForce(this, force, contactPoint);
}
return this;
}
/**
* Apply an impulse
* @param force The impulse force
* @param contactPoint The contact point for the impulse force
* @returns The physics imposter
*/
public applyImpulse(force: Vector3, contactPoint: Vector3): PhysicsImpostor {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().applyImpulse(this, force, contactPoint);
}
return this;
}
/**
* A help function to create a joint
* @param otherImpostor A physics imposter used to create a joint
* @param jointType The type of joint
* @param jointData The data for the joint
* @returns The physics imposter
*/
public createJoint(otherImpostor: PhysicsImpostor, jointType: number, jointData: PhysicsJointData): PhysicsImpostor {
var joint = new PhysicsJoint(jointType, jointData);
this.addJoint(otherImpostor, joint);
return this;
}
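    // Illustrative usage sketch (not part of the original source): joining two
    // impostors with a hinge. `wheelImpostor` and `axleImpostor` are assumed to
    // exist, and PhysicsJoint.HingeJoint and the pivot/axis fields are assumed
    // to come from the PhysicsJointData declared in physicsJoint.ts.
    //
    //     wheelImpostor.createJoint(axleImpostor, PhysicsJoint.HingeJoint, {
    //         mainPivot: new Vector3(0, 0, 0),
    //         connectedPivot: new Vector3(0, 1, 0),
    //         mainAxis: new Vector3(0, 0, 1),
    //         connectedAxis: new Vector3(0, 0, 1),
    //     });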
/**
* Add a joint to this impostor with a different impostor
* @param otherImpostor A physics imposter used to add a joint
* @param joint The joint to add
* @returns The physics imposter
*/
public addJoint(otherImpostor: PhysicsImpostor, joint: PhysicsJoint): PhysicsImpostor {
this._joints.push({
otherImpostor: otherImpostor,
joint: joint,
});
if (this._physicsEngine) {
this._physicsEngine.addJoint(this, otherImpostor, joint);
}
return this;
}
/**
* Add an anchor to a cloth impostor
* @param otherImpostor rigid impostor to anchor to
* @param width ratio across width from 0 to 1
* @param height ratio up height from 0 to 1
     * @param influence the elasticity between cloth impostor and anchor from 0, very stretchy to 1, little stretch
* @param noCollisionBetweenLinkedBodies when true collisions between cloth impostor and anchor are ignored; default false
* @returns impostor the soft imposter
*/
public addAnchor(otherImpostor: PhysicsImpostor, width: number, height: number, influence: number, noCollisionBetweenLinkedBodies: boolean): PhysicsImpostor {
if (!this._physicsEngine) {
return this;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.appendAnchor) {
return this;
}
if (this._physicsEngine) {
plugin.appendAnchor!(this, otherImpostor, width, height, influence, noCollisionBetweenLinkedBodies);
}
return this;
}
/**
* Add a hook to a rope impostor
* @param otherImpostor rigid impostor to anchor to
* @param length ratio across rope from 0 to 1
     * @param influence the elasticity between rope impostor and anchor from 0, very stretchy to 1, little stretch
* @param noCollisionBetweenLinkedBodies when true collisions between soft impostor and anchor are ignored; default false
* @returns impostor the rope imposter
*/
public addHook(otherImpostor: PhysicsImpostor, length: number, influence: number, noCollisionBetweenLinkedBodies: boolean): PhysicsImpostor {
if (!this._physicsEngine) {
return this;
}
const plugin = this._physicsEngine.getPhysicsPlugin();
if (!plugin.appendAnchor) {
return this;
}
if (this._physicsEngine) {
plugin.appendHook!(this, otherImpostor, length, influence, noCollisionBetweenLinkedBodies);
}
return this;
}
/**
* Will keep this body still, in a sleep mode.
* @returns the physics imposter
*/
public sleep(): PhysicsImpostor {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().sleepBody(this);
}
return this;
}
/**
* Wake the body up.
* @returns The physics imposter
*/
public wakeUp(): PhysicsImpostor {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().wakeUpBody(this);
}
return this;
}
/**
* Clones the physics imposter
* @param newObject The physics imposter clones to this physics-enabled object
* @returns A nullable physics imposter
*/
public clone(newObject: IPhysicsEnabledObject): Nullable<PhysicsImpostor> {
if (!newObject) {
return null;
}
return new PhysicsImpostor(newObject, this.type, this._options, this._scene);
}
/**
* Disposes the physics imposter
*/
public dispose(/*disposeChildren: boolean = true*/) {
//no dispose if no physics engine is available.
if (!this._physicsEngine) {
return;
}
this._joints.forEach((j) => {
if (this._physicsEngine) {
this._physicsEngine.removeJoint(this, j.otherImpostor, j.joint);
}
});
//dispose the physics body
this._physicsEngine.removeImpostor(this);
if (this.parent) {
this.parent.forceUpdate();
} else {
/*this._object.getChildMeshes().forEach(function(mesh) {
if (mesh.physicsImpostor) {
if (disposeChildren) {
mesh.physicsImpostor.dispose();
mesh.physicsImpostor = null;
}
}
})*/
}
this._isDisposed = true;
}
/**
* Sets the delta position
* @param position The delta position amount
*/
public setDeltaPosition(position: Vector3) {
this._deltaPosition.copyFrom(position);
}
/**
* Sets the delta rotation
* @param rotation The delta rotation amount
*/
public setDeltaRotation(rotation: Quaternion) {
if (!this._deltaRotation) {
this._deltaRotation = new Quaternion();
}
this._deltaRotation.copyFrom(rotation);
this._deltaRotationConjugated = this._deltaRotation.conjugate();
}
/**
* Gets the box size of the physics imposter and stores the result in the input parameter
* @param result Stores the box size
* @returns The physics imposter
*/
public getBoxSizeToRef(result: Vector3): PhysicsImpostor {
if (this._physicsEngine) {
this._physicsEngine.getPhysicsPlugin().getBoxSizeToRef(this, result);
}
return this;
}
/**
* Gets the radius of the physics imposter
* @returns Radius of the physics imposter
*/
public getRadius(): number {
return this._physicsEngine ? this._physicsEngine.getPhysicsPlugin().getRadius(this) : 0;
}
/**
* Sync a bone with this impostor
* @param bone The bone to sync to the impostor.
* @param boneMesh The mesh that the bone is influencing.
* @param jointPivot The pivot of the joint / bone in local space.
* @param distToJoint Optional distance from the impostor to the joint.
* @param adjustRotation Optional quaternion for adjusting the local rotation of the bone.
*/
public syncBoneWithImpostor(bone: Bone, boneMesh: AbstractMesh, jointPivot: Vector3, distToJoint?: number, adjustRotation?: Quaternion) {
var tempVec = PhysicsImpostor._tmpVecs[0];
var mesh = <AbstractMesh>this.object;
if (mesh.rotationQuaternion) {
if (adjustRotation) {
var tempQuat = PhysicsImpostor._tmpQuat;
mesh.rotationQuaternion.multiplyToRef(adjustRotation, tempQuat);
bone.setRotationQuaternion(tempQuat, Space.WORLD, boneMesh);
} else {
bone.setRotationQuaternion(mesh.rotationQuaternion, Space.WORLD, boneMesh);
}
}
tempVec.x = 0;
tempVec.y = 0;
tempVec.z = 0;
if (jointPivot) {
tempVec.x = jointPivot.x;
tempVec.y = jointPivot.y;
tempVec.z = jointPivot.z;
bone.getDirectionToRef(tempVec, boneMesh, tempVec);
if (distToJoint === undefined || distToJoint === null) {
distToJoint = jointPivot.length();
}
tempVec.x *= distToJoint;
tempVec.y *= distToJoint;
tempVec.z *= distToJoint;
}
if (bone.getParent()) {
tempVec.addInPlace(mesh.getAbsolutePosition());
bone.setAbsolutePosition(tempVec, boneMesh);
} else {
boneMesh.setAbsolutePosition(mesh.getAbsolutePosition());
boneMesh.position.x -= tempVec.x;
boneMesh.position.y -= tempVec.y;
boneMesh.position.z -= tempVec.z;
}
}
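    // Example (hypothetical usage, assuming `limbImpostor`, `skeleton`, `skinnedMesh` and
    // `scene` exist): driving a ragdoll bone from its physics body every frame:
    //   scene.registerBeforeRender(() => {
    //       limbImpostor.syncBoneWithImpostor(skeleton.bones[3], skinnedMesh, Vector3.Zero());
    //   });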
/**
* Sync impostor to a bone
* @param bone The bone that the impostor will be synced to.
* @param boneMesh The mesh that the bone is influencing.
* @param jointPivot The pivot of the joint / bone in local space.
* @param distToJoint Optional distance from the impostor to the joint.
* @param adjustRotation Optional quaternion for adjusting the local rotation of the bone.
* @param boneAxis Optional vector3 axis the bone is aligned with
*/
public syncImpostorWithBone(bone: Bone, boneMesh: AbstractMesh, jointPivot: Vector3, distToJoint?: number, adjustRotation?: Quaternion, boneAxis?: Vector3) {
var mesh = <AbstractMesh>this.object;
if (mesh.rotationQuaternion) {
if (adjustRotation) {
var tempQuat = PhysicsImpostor._tmpQuat;
bone.getRotationQuaternionToRef(Space.WORLD, boneMesh, tempQuat);
tempQuat.multiplyToRef(adjustRotation, mesh.rotationQuaternion);
} else {
bone.getRotationQuaternionToRef(Space.WORLD, boneMesh, mesh.rotationQuaternion);
}
}
var pos = PhysicsImpostor._tmpVecs[0];
var boneDir = PhysicsImpostor._tmpVecs[1];
if (!boneAxis) {
boneAxis = PhysicsImpostor._tmpVecs[2];
boneAxis.x = 0;
boneAxis.y = 1;
boneAxis.z = 0;
}
bone.getDirectionToRef(boneAxis, boneMesh, boneDir);
bone.getAbsolutePositionToRef(boneMesh, pos);
if ((distToJoint === undefined || distToJoint === null) && jointPivot) {
distToJoint = jointPivot.length();
}
if (distToJoint !== undefined && distToJoint !== null) {
pos.x += boneDir.x * distToJoint;
pos.y += boneDir.y * distToJoint;
pos.z += boneDir.z * distToJoint;
}
mesh.setAbsolutePosition(pos);
}
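    // Example (hypothetical usage, assuming `handImpostor`, `skeleton` and `skinnedMesh`
    // exist): the inverse of the call above, making a kinematic impostor follow an animated bone:
    //   handImpostor.syncImpostorWithBone(skeleton.bones[10], skinnedMesh, Vector3.Zero());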
//Impostor types
/**
* No-Imposter type
*/
public static NoImpostor = 0;
/**
* Sphere-Imposter type
*/
public static SphereImpostor = 1;
/**
* Box-Imposter type
*/
public static BoxImpostor = 2;
/**
* Plane-Imposter type
*/
public static PlaneImpostor = 3;
/**
     * Mesh-Imposter type
*/
public static MeshImpostor = 4;
/**
* Capsule-Impostor type (Ammo.js plugin only)
*/
public static CapsuleImpostor = 6;
/**
* Cylinder-Imposter type
*/
public static CylinderImpostor = 7;
/**
* Particle-Imposter type
*/
public static ParticleImpostor = 8;
/**
* Heightmap-Imposter type
*/
public static HeightmapImpostor = 9;
/**
* ConvexHull-Impostor type (Ammo.js plugin only)
*/
public static ConvexHullImpostor = 10;
/**
* Custom-Imposter type (Ammo.js plugin only)
*/
public static CustomImpostor = 100;
/**
* Rope-Imposter type
*/
public static RopeImpostor = 101;
/**
* Cloth-Imposter type
*/
public static ClothImpostor = 102;
/**
* Softbody-Imposter type
*/
public static SoftbodyImpostor = 103;
}<|fim▁end|> | }
|
<|file_name|>PtReportHistoryQueryHelper.java<|end_file_name|><|fim▁begin|>package com.yh.hr.res.pt.queryhelper;
import com.yh.hr.res.pt.dto.PtReportHistoryDTO;
import com.yh.platform.core.dao.DaoUtil;
import com.yh.platform.core.exception.ServiceException;
import com.yh.platform.core.util.BeanHelper;
import org.apache.commons.collections.CollectionUtils;
import java.util.List;
/**
 * Fetches report history data
*/<|fim▁hole|> /**
     * Queries the report history for a task
     * @param taskOid the task OID to filter by
     * @param reportType the report type to filter by
     * @return the first matching report history DTO, or null when none exists
     * @throws ServiceException if the query fails
     */
public static PtReportHistoryDTO getPtReportHistory(Long taskOid, String reportType) throws ServiceException {
        final StringBuffer hBuffer = new StringBuffer("from PtReportHistory pt where 1 = 1 ");
        hBuffer.append(" and pt.taskOid = " + taskOid);
        hBuffer.append(" and pt.reportType = '" + reportType + "'");
List<PtReportHistoryDTO> list = BeanHelper.copyProperties(DaoUtil.find(hBuffer.toString()), PtReportHistoryDTO.class);
if(CollectionUtils.isNotEmpty(list))
{
return list.get(0);
}
return null;
}
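    // Example (hypothetical usage; the task OID and report type code below are made up):
    //   PtReportHistoryDTO history = PtReportHistoryQueryHelper.getPtReportHistory(1001L, "MONTHLY");
    //   if (history != null) { /* reuse the previously generated report */ }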
}<|fim▁end|> | public class PtReportHistoryQueryHelper {
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""
Application urlconfig
"""
from __future__ import absolute_import
from django.conf.urls import url
from . import views
urlpatterns = [
url(
r"^(?P<uuid>[0-9a-f-]{36})/$",
views.RateView.as_view(),
name="rate"<|fim▁hole|> ),
url(
r"^2/(?P<uuid>[0-9a-f-]{36})/$",
views.Rate2View.as_view(),
name="rate2"
)
]<|fim▁end|> | |
<|file_name|>Timer.presets.ts<|end_file_name|><|fim▁begin|>/**
 * Wires up the preset settings form behavior
*/
const { ipcRenderer } = require('electron');
const { getConfig, setConfig } = require('./Timer');
export default () => {
const $presets = $('#presets');<|fim▁hole|> const $delete = $presets.find('.delete');
const $name = $presetDialog.find('INPUT.name');
const $export = $presetDialog.find('INPUT.export');
const $exportInvalid = $presetDialog.find('.export-invalid');
const $save = $presetDialog.find('.save');
    // Build the preset selector <select> element
const drawPresetSelect = () => {
const $select = $('<select class="form-control preset"/>');
const presets: Preset[] = getConfig('presets');
const presetIdx = getConfig('presetIdx');
$presets.find('.select').empty().append($select);
        // Create an OPTION element for each preset
presets.forEach(({ name }, idx) => $(`<option value="${idx}">${name}</option>`).appendTo($select));
$select.val(presetIdx);
};
const getBase64 = (o: any) => Buffer.from(JSON.stringify(o)).toString('base64');
const parseBase64 = (base64: string) => {
try {
const result = JSON.parse(Buffer.from(base64, 'base64').toString('utf8'));
$exportInvalid.text('');
return result;
        } catch (e) { // invalid base64 input
console.error(e);
$exportInvalid.text('Invalid Import config');
}
};
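    // Note (added for clarity): getBase64/parseBase64 round-trip a preset config through
    // base64 so it can be copy-pasted in the dialog, e.g. parseBase64(getBase64(config))
    // yields a structural copy of `config`; parseBase64 returns undefined on invalid input.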
    // Edit the current preset
const onEdit = () => {
const config = getConfig('all');
const base64 = getBase64(config);
$name.val(config.name);
$export.val(base64);
$exportInvalid.text('');
$presetDialog.modal('show');
};
    // Save the preset
const onSave = () => {
const config = getConfig('all');
const base64 = getBase64(config);
const newConfig: string = $export.val();
config.name = $name.val();
        // If the exported base64 was edited, merge it into the config before saving
newConfig !== base64 && Object.assign(config, parseBase64(newConfig) || {});
ipcRenderer.send('changePreset', 'save', config);
$presetDialog.modal('hide');
};
    // Add a new preset
const onNew = () => ipcRenderer.send('changePreset', 'new');
const onDelete = () => {
const config = getConfig('all');
if (!confirm(`Do you want to delete this preset?\n\n${config.name}`)) return;
ipcRenderer.send('changePreset', 'delete');
};
drawPresetSelect();
    // Redraw the preset selector whenever the main process changes the config
ipcRenderer.on('onChangeConfig', drawPresetSelect);
    // Persist presetIdx whenever the selected preset changes.
$presets.on('change', '.preset', ({ target }) => {
setConfig('presetIdx', (<HTMLInputElement>target).value);
});
$edit.on('click', onEdit);
$new.on('click', onNew);
$delete.on('click', onDelete);
$save.on('click', onSave);
$export.on('keyup', ({ target }) => parseBase64((<HTMLInputElement>target).value));
};<|fim▁end|> | const $presetDialog = $('#presetDialog');
const $edit = $presets.find('.edit');
const $new = $presets.find('.new'); |
<|file_name|>run.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|><|fim▁end|> | * Run the APP
*/
app.run(); |