prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>test-fs-open-close.js<|end_file_name|><|fim▁begin|>// Copyright IBM Corp. 2014. All Rights Reserved.
// Node module: async-tracker
// This file is licensed under the MIT License.
// License text available at https://opensource.org/licenses/MIT
var assert = require('assert');
require('../index.js');
var fs = require('fs');
var util = require('util');
var cnt = 0;
var Listener = function() {
var evtName = asyncTracker.events.fs.open;
this.deferredCreated = {};
this.invokeDeferred = {};
this.deferredReleased = {};
this.deferredCreated[evtName] = function(fName, fId, args) {
assert.equal(cnt, 0);
cnt += 1;
};
this.deferredCreated['default'] = function(fName, fId, args) {
assert.equal(cnt, 4);
cnt += 1;
};
this.invokeDeferred[evtName] = function(fName, fId, next) {
assert.equal(cnt, 2);
cnt += 1;
next();
};
this.invokeDeferred['default'] = function(fName, fId, next) {
assert.equal(cnt, 6);
cnt += 1;
next();
};
this.deferredReleased[evtName] = function(fName, fId) {
assert.equal(cnt, 5);
cnt += 1;
};
this.deferredReleased['default'] = function(fName, fId) {
assert.equal(cnt, 7);
cnt += 1;
};
this.objectCreated = function(obj) {
assert.equal(cnt, 1);
cnt += 1;
};
this.objectReleased = function(obj) {
assert.equal(cnt, 3);
cnt += 1;
};
};
var listener = new Listener();
asyncTracker.addListener(listener, 'listener');
function closeCallback() {
}
function openCallback(err, fd) {<|fim▁hole|>}
fs.open(__filename, 'r', openCallback);
asyncTracker.removeListener('listener');<|fim▁end|>
|
fs.close(fd, closeCallback);
|
<|file_name|>resultsChartController.js<|end_file_name|><|fim▁begin|>(function(){
angular
.module("InteractionDesign")
.controller("ResultsChartController", ResultsChartController);
function ResultsChartController($scope, $location) {
$scope.$location = $location;
var data1 = [
{ x: new Date(2012, 00, 1), y: 12 },
{ x: new Date(2012, 00, 2), y: 13 },
{ x: new Date(2012, 00, 3), y: 16 },
{ x: new Date(2012, 00, 4), y: 37 },
{ x: new Date(2012, 00, 5), y: 39 },
{ x: new Date(2012, 00, 6), y: 42 },
{ x: new Date(2012, 00, 7), y: 25 },
{ x: new Date(2012, 00, 8), y: 18 },
{ x: new Date(2012, 00, 9), y: 22 },
{ x: new Date(2012, 00, 10), y: 25 },
{ x: new Date(2012, 00, 11), y: 25 },
{ x: new Date(2012, 00, 12), y: 28 }
];
var data2 = [
{ x: new Date(2012, 00, 1), y: 50 },
{ x: new Date(2012, 00, 2), y: 45 },
{ x: new Date(2012, 00, 3), y: 40 },
{ x: new Date(2012, 00, 4), y: 25 },
{ x: new Date(2012, 00, 5), y: 24 },
{ x: new Date(2012, 00, 6), y: 29 },
{ x: new Date(2012, 00, 7), y: 43 },
{ x: new Date(2012, 00, 8), y: 35 },
{ x: new Date(2012, 00, 9), y: 34 },
{ x: new Date(2012, 00, 10), y: 39 },
{ x: new Date(2012, 00, 11), y: 43 },
{ x: new Date(2012, 00, 12), y: 48 }
];
var data3 = [
{ x: new Date(2012, 00, 1), y: 26 },
{ x: new Date(2012, 00, 2), y: 25 },
{ x: new Date(2012, 00, 3), y: 23 },
{ x: new Date(2012, 00, 4), y: 20 },
{ x: new Date(2012, 00, 5), y: 17 },
{ x: new Date(2012, 00, 6), y: 17 },
{ x: new Date(2012, 00, 7), y: 17 },
{ x: new Date(2012, 00, 8), y: 18 },<|fim▁hole|> { x: new Date(2012, 00, 9), y: 12 },
{ x: new Date(2012, 00, 10), y: 8 },
{ x: new Date(2012, 00, 11), y: 8 },
{ x: new Date(2012, 00, 12), y: 12 }
];
var data4 = [
{ x: new Date(2012, 00, 1), y: 25 },
{ x: new Date(2012, 00, 2), y: 26 },
{ x: new Date(2012, 00, 3), y: 28 },
{ x: new Date(2012, 00, 4), y: 28 },
{ x: new Date(2012, 00, 5), y: 30 },
{ x: new Date(2012, 00, 6), y: 35 },
{ x: new Date(2012, 00, 7), y: 25 },
{ x: new Date(2012, 00, 8), y: 18 },
{ x: new Date(2012, 00, 9), y: 22 },
{ x: new Date(2012, 00, 10), y: 25 },
{ x: new Date(2012, 00, 11), y: 25 },
{ x: new Date(2012, 00, 12), y: 28 }
];
var data5 = [
{ x: new Date(2012, 00, 1), y: 25 },
{ x: new Date(2012, 00, 2), y: 24 },
{ x: new Date(2012, 00, 3), y: 45 },
{ x: new Date(2012, 00, 4), y: 29 },
{ x: new Date(2012, 00, 5), y: 24 },
{ x: new Date(2012, 00, 6), y: 29 },
{ x: new Date(2012, 00, 7), y: 18 },
{ x: new Date(2012, 00, 8), y: 22 },
{ x: new Date(2012, 00, 9), y: 25 },
{ x: new Date(2012, 00, 10), y: 25 },
{ x: new Date(2012, 00, 11), y: 28 },
{ x: new Date(2012, 00, 12), y: 28 }
];
var data6 = [
{ x: new Date(2012, 00, 1), y: 26 },
{ x: new Date(2012, 00, 2), y: 25 },
{ x: new Date(2012, 00, 3), y: 23 },
{ x: new Date(2012, 00, 4), y: 50 },
{ x: new Date(2012, 00, 5), y: 25 },
{ x: new Date(2012, 00, 6), y: 29 },
{ x: new Date(2012, 00, 7), y: 25 },
{ x: new Date(2012, 00, 8), y: 28 },
{ x: new Date(2012, 00, 9), y: 12 },
{ x: new Date(2012, 00, 10), y: 8 },
{ x: new Date(2012, 00, 11), y: 8 },
{ x: new Date(2012, 00, 12), y: 12 }
];
$scope.filters = [
{name: "Age", selected: false, options: [
{name: "18-24", selected: false},
{name: "25-34", selected: false},
{name: "35-49", selected: false},
{name: "50-65", selected: false},
{name: "65+", selected: false}
]},
{name: "Occupation", selected: false, options: [
{name: "Students", selected: false},
{name: "Blue-Collar", selected: false},
{name: "White-Collar", selected: false}
]},
{name: "Gender", selected: false, options: [
{name: "Male", selected: false},
{name: "Female", selected: false}
]},
{name: "State", selected: false, options: [
{name: "Alabama", selected: false},
{name: "Arkansas", selected: false},
{name: "New York", selected: false},
{name: "California", selected: false},
{name: "Massachusetts", selected: false}
]},
{name: "Political Party", selected: false, options: [
{name: "Democrat", selected: false},
{name: "Republican", selected: false}
]},
];
$scope.clickFilter = function(filter) {
for (filterIdx in $scope.filters) {
var theFilter = $scope.filters[filterIdx];
if (theFilter.name == filter.name && theFilter.selected == false) {
theFilter.selected = true;
}
else {
theFilter.selected = false;
for (optionIdx in theFilter.options) {
theFilter.options[optionIdx].selected = false;
}
}
}
}
$scope.clickSubOption = function(filter, subOption) {
for (filterIdx in $scope.filters) {
var theFilter = $scope.filters[filterIdx];
if (theFilter.name == filter.name && theFilter.selected == true) {
for (optionIdx in theFilter.options) {
var option = theFilter.options[optionIdx];
if (option.name == subOption.name && option.selected == false) {
option.selected = true;
}
else {
option.selected = false;
}
}
}
}
if (subOption.name == "18-24" && subOption.selected == true) {
reloadChart(data4, data5, data6, false, "Results 18-24");
}
else {
reloadChart(data1, data2, data3, false, "Results");
}
}
function reloadChart(data_1, data_2, data_3, animated, title) {
var chart = new CanvasJS.Chart("chartContainer",
{
theme: "theme2",
title:{
text: title
},
animationEnabled: animated,
axisX: {
valueFormatString: "MMM",
interval:1,
intervalType: "month"
},
axisY:{
includeZero: false
},
data: [
{
type: "line",
showInLegend: true,
lineThickness: 2,
name: "Hillary Clinton",
dataPoints: data_1
},
{
type: "line",
showInLegend: true,
lineThickness: 2,
name: "Bernie Sanders",
dataPoints: data_2
},
{
type: "line",
showInLegend: true,
lineThickness: 2,
name: "Donald Trump",
dataPoints: data_3
}
]
});
chart.render();
}
reloadChart(data1, data2, data3, true, "Results");
}
})();<|fim▁end|>
| |
<|file_name|>socket.rs<|end_file_name|><|fim▁begin|>// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
use std::convert::From;
use std::io;
use std::io::{ErrorKind, Read, Write};
use std::net::{Shutdown, TcpStream};
use {TransportErrorKind, new_transport_error};
use super::{ReadHalf, TIoChannel, WriteHalf};
/// Bidirectional TCP/IP channel.
///
/// # Examples<|fim▁hole|>/// ```no_run
/// use std::io::{Read, Write};
/// use thrift::transport::TTcpChannel;
///
/// let mut c = TTcpChannel::new();
/// c.open("localhost:9090").unwrap();
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
///
/// Create a `TTcpChannel` by wrapping an existing `TcpStream`.
///
/// ```no_run
/// use std::io::{Read, Write};
/// use std::net::TcpStream;
/// use thrift::transport::TTcpChannel;
///
/// let stream = TcpStream::connect("127.0.0.1:9189").unwrap();
///
/// // no need to call c.open() since we've already connected above
/// let mut c = TTcpChannel::with_stream(stream);
///
/// let mut buf = vec![0u8; 4];
/// c.read(&mut buf).unwrap();
/// c.write(&vec![0, 1, 2]).unwrap();
/// ```
#[derive(Debug, Default)]
pub struct TTcpChannel {
stream: Option<TcpStream>,
}
impl TTcpChannel {
/// Create an uninitialized `TTcpChannel`.
///
/// The returned instance must be opened using `TTcpChannel::open(...)`
/// before it can be used.
pub fn new() -> TTcpChannel {
TTcpChannel { stream: None }
}
/// Create a `TTcpChannel` that wraps an existing `TcpStream`.
///
/// The passed-in stream is assumed to have been opened before being wrapped
/// by the created `TTcpChannel` instance.
pub fn with_stream(stream: TcpStream) -> TTcpChannel {
TTcpChannel { stream: Some(stream) }
}
/// Connect to `remote_address`, which should have the form `host:port`.
pub fn open(&mut self, remote_address: &str) -> ::Result<()> {
if self.stream.is_some() {
Err(
new_transport_error(
TransportErrorKind::AlreadyOpen,
"tcp connection previously opened",
),
)
} else {
match TcpStream::connect(&remote_address) {
Ok(s) => {
self.stream = Some(s);
Ok(())
}
Err(e) => Err(From::from(e)),
}
}
}
/// Shut down this channel.
///
/// Both send and receive halves are closed, and this instance can no
/// longer be used to communicate with another endpoint.
pub fn close(&mut self) -> ::Result<()> {
self.if_set(|s| s.shutdown(Shutdown::Both))
.map_err(From::from)
}
fn if_set<F, T>(&mut self, mut stream_operation: F) -> io::Result<T>
where
F: FnMut(&mut TcpStream) -> io::Result<T>,
{
if let Some(ref mut s) = self.stream {
stream_operation(s)
} else {
Err(io::Error::new(ErrorKind::NotConnected, "tcp endpoint not connected"),)
}
}
}
impl TIoChannel for TTcpChannel {
fn split(self) -> ::Result<(ReadHalf<Self>, WriteHalf<Self>)>
where
Self: Sized,
{
let mut s = self;
s.stream
.as_mut()
.and_then(|s| s.try_clone().ok())
.map(
|cloned| {
(ReadHalf { handle: TTcpChannel { stream: s.stream.take() } },
WriteHalf { handle: TTcpChannel { stream: Some(cloned) } })
},
)
.ok_or_else(
|| {
new_transport_error(
TransportErrorKind::Unknown,
"cannot clone underlying tcp stream",
)
},
)
}
}
impl Read for TTcpChannel {
fn read(&mut self, b: &mut [u8]) -> io::Result<usize> {
self.if_set(|s| s.read(b))
}
}
impl Write for TTcpChannel {
fn write(&mut self, b: &[u8]) -> io::Result<usize> {
self.if_set(|s| s.write_all(b)).map(|_| b.len())
}
fn flush(&mut self) -> io::Result<()> {
self.if_set(|s| s.flush())
}
}<|fim▁end|>
|
///
/// Create a `TTcpChannel`.
///
|
<|file_name|>logctx.go<|end_file_name|><|fim▁begin|>package logctx
import (
"net/http"
"sync"
"time"
"github.com/lestrrat/go-apache-logformat/internal/httputil"
)
type clock interface {
Now() time.Time
}
type defaultClock struct{}
func (_ defaultClock) Now() time.Time {
return time.Now()
}
var Clock clock = defaultClock{}
type Context struct {
elapsedTime time.Duration
request *http.Request
requestTime time.Time
responseContentLength int64
responseHeader http.Header
responseStatus int
responseTime time.Time
}
var pool = sync.Pool{New: allocCtx}
func allocCtx() interface{} {
return &Context{}
}
func Get(r *http.Request) *Context {
ctx := pool.Get().(*Context)
ctx.request = r
ctx.requestTime = Clock.Now()
return ctx
}
func Release(ctx *Context) {
ctx.Reset()
pool.Put(ctx)
}
func (ctx *Context) ElapsedTime() time.Duration {
return ctx.elapsedTime
}
func (ctx *Context) Request() *http.Request {
return ctx.request
}
func (ctx *Context) RequestTime() time.Time {
return ctx.requestTime
}
func (ctx *Context) ResponseContentLength() int64 {
return ctx.responseContentLength
}
func (ctx *Context) ResponseHeader() http.Header {
return ctx.responseHeader
}
func (ctx *Context) ResponseStatus() int {
return ctx.responseStatus
}
func (ctx *Context) ResponseTime() time.Time {
return ctx.responseTime
}
func (ctx *Context) Reset() {
ctx.elapsedTime = time.Duration(0)
ctx.request = nil
ctx.requestTime = time.Time{}
ctx.responseContentLength = 0
ctx.responseHeader = http.Header{}
ctx.responseStatus = http.StatusOK
ctx.responseTime = time.Time{}
}
func (ctx *Context) Finalize(wrapped *httputil.ResponseWriter) {
ctx.responseTime = Clock.Now()
ctx.elapsedTime = ctx.responseTime.Sub(ctx.requestTime)<|fim▁hole|>}<|fim▁end|>
|
ctx.responseContentLength = wrapped.ContentLength()
ctx.responseHeader = wrapped.Header()
ctx.responseStatus = wrapped.StatusCode()
|
<|file_name|>povray.js<|end_file_name|><|fim▁begin|>// Generated by CoffeeScript 1.6.3
(function() {
var Stl, stl_parser;
stl_parser = require('../parser/stl_parser');
Stl = (function() {
function Stl() {}
return Stl;
})();
Stl.PovRay = (function() {
function PovRay() {}
PovRay.prototype._povHeaders = function(name) {
return "#declare " + name + " = mesh {\n";
};
PovRay.prototype._povFooters = function() {
return "}";
};
PovRay.prototype.convertFile = function(filePath, callback, progressCb) {
var output,
_this = this;
output = "";
return stl_parser.parseFile(filePath, function(err, polygons, name) {
var unique_name;<|fim▁hole|> if (err != null) {
callback(err);
return;
}
unique_name = '__' + name + '__';
output += _this._povFooters();
return callback(null, output, unique_name);
}, function(err, polygon, name) {
var povPolygon, unique_name;
unique_name = '__' + name + '__';
if (output.length === 0) {
output += _this._povHeaders(unique_name);
}
povPolygon = _this.convertPolygon(polygon);
output += povPolygon;
if (progressCb != null) {
return progressCb(err, povPolygon, unique_name);
}
});
};
PovRay.prototype.convertPolygon = function(polygon) {
var idx, output, vertex, _i, _len, _ref;
output = "";
output += " triangle {\n";
_ref = polygon.verticies;
for (idx = _i = 0, _len = _ref.length; _i < _len; idx = ++_i) {
vertex = _ref[idx];
output += " <" + vertex[0] + ", " + (-vertex[1]) + ", " + vertex[2] + ">";
if (idx !== (polygon.verticies.length - 1)) {
output += ",\n";
}
}
output += " }\n";
return output;
};
return PovRay;
})();
module.exports = new Stl.PovRay();
}).call(this);<|fim▁end|>
| |
<|file_name|>Square.java<|end_file_name|><|fim▁begin|>package ru.stqa.pft.sandbox;
/**
* Created by Даниил on 06.05.2017.
*/
public class Square {
public double l;
public Square(double l) {
this.l = l;
}
public double area() {<|fim▁hole|>}<|fim▁end|>
|
return this.l * this.l;
}
|
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># Generated by Django 3.1 on 2020-08-13 19:23
from django.db import migrations, models
import django.db.models.deletion
import django_countries.fields
import uuid
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='LunchType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=64)),
('sort_order', models.PositiveIntegerField()),
('unit_price', models.DecimalField(decimal_places=2, default=0, max_digits=12)),
],
options={
'ordering': ['sort_order'],
},
),
migrations.CreateModel(
name='PassType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(choices=[('party', 'Party Pass'), ('full', 'Full Pass')], max_length=32)),
('name', models.CharField(max_length=64)),
('active', models.BooleanField(default=False)),
('sort_order', models.PositiveIntegerField()),
('quantity_in_stock', models.PositiveIntegerField(default=0)),
('unit_price', models.DecimalField(decimal_places=2, default=0, max_digits=12)),
('data', models.JSONField(blank=True)),
],
options={
'ordering': ['sort_order'],
},
),
migrations.CreateModel(
name='Registration',
fields=[
('id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
('first_name', models.CharField(max_length=64)),
('last_name', models.CharField(max_length=64)),
('email', models.EmailField(max_length=254, unique=True)),
('dance_role', models.CharField(choices=[('leader', 'Leader'), ('follower', 'Follower')], default='leader', max_length=32)),
('residing_country', django_countries.fields.CountryField(max_length=2)),
('workshop_partner_name', models.CharField(blank=True, max_length=128)),
('workshop_partner_email', models.EmailField(blank=True, max_length=254)),
('crew_remarks', models.TextField(blank=True, max_length=4096)),
('total_price', models.DecimalField(decimal_places=2, default=0, max_digits=12)),
('audition_url', models.URLField(blank=True)),
('accepted_at', models.DateTimeField(blank=True, null=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('lunch', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='registration.lunchtype')),
('pass_type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='registration.passtype')),
],
),
migrations.CreateModel(
name='Payment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mollie_payment_id', models.CharField(blank=True, max_length=64, null=True, unique=True)),
('amount', models.DecimalField(decimal_places=2, default=0, max_digits=12)),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('registration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registration.registration')),
],
),<|fim▁hole|> ('description', models.TextField(blank=True, max_length=4096)),
('created_at', models.DateTimeField(auto_now_add=True)),
('registration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='registration.registration')),
],
),
]<|fim▁end|>
|
migrations.CreateModel(
name='Interaction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
|
<|file_name|>editor.py<|end_file_name|><|fim▁begin|>"""gui systems to manage actions
"""
import os
from sftoolbox.content import ActionContent, PanelContent
from sftoolboxqt import qtgui, qtcore
from sftoolboxqt.tree import PanelsModel, PanelsTreeWidget
class ActionsTreeWidget(qtgui.QTreeWidget):
"""tree widget holding actions
"""
def startDrag(self, dropAction):
# create mime data object
mime = qtcore.QMimeData()
mime.setData('application/x-item', '???')
# start drag
drag = qtgui.QDrag(self)
drag.setMimeData(mime)
# drag.start(qtcore.Qt.CopyAction)
# drag.start(qtcore.Qt.CopyAction)
drag.exec_(dropAction, qtcore.Qt.MoveAction)
class PanelsWidget(qtgui.QWidget):
"""browser for panels
"""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(PanelsWidget, self).__init__(parent=parent)
self.setWindowTitle('Panels Browser')
self._project = project
self._tree_model = PanelsModel(project)
self._tree = self._create_panels_tree_widget(self._tree_model)
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree)
self.setLayout(layout)
def _create_panels_tree_widget(self, model):
"""return tree widget that will contain the actions
"""
tree = PanelsTreeWidget()
tree.setModel(model)
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
tree.setAcceptDrops(True)
return tree
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._tree_model.project = value
class ActionsWidget(qtgui.QWidget):
"""browser system for browsing trough the actions
"""
def _create_actions_tree_widget(self):
"""return tree widget that will contain the actions
"""
tree = ActionsTreeWidget()
tree.setHeaderLabels(['Action', 'IDName', 'Tags'])
tree.setSortingEnabled(True)
tree.setDragEnabled(True)
return tree
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(ActionsWidget, self).__init__(parent=parent)
self.setWindowTitle('Actions Browser')
self._project = project
self._tree_widget = self._create_actions_tree_widget()
layout = qtgui.QVBoxLayout()
layout.addWidget(self._tree_widget)
self.setLayout(layout)
layout.addWidget(self._tree_widget)
self._refresh_content()
@property
def project(self):
return self._project
@project.setter
def project(self, value):<|fim▁hole|> self._refresh_content()
def _handle_item_double_clicked(self, item):
"""handle doubleclicking item
"""
item.action.run()
def _refresh_content(self):
"""refresh the content
"""
self._tree_widget.clear()
self._tree_widget.itemDoubleClicked.connect(
self._handle_item_double_clicked)
if not self.project:
return
for action in self.project.actions:
item = qtgui.QTreeWidgetItem()
icon_filepath = action.absolute_icon_filepath
if icon_filepath and os.path.exists(icon_filepath):
item.setIcon(0, qtgui.QIcon(icon_filepath))
item.setText(0, action.human_label)
item.setText(1, action.idname)
item.setText(2, ', '.join(map(str, action.tags)))
item.action = action
self._tree_widget.addTopLevelItem(item)
class EditorWidget(qtgui.QWidget):
""""""
def __init__(self, project=None, parent=None):
"""construct the browser
"""
super(EditorWidget, self).__init__(parent=parent)
self.setWindowTitle('Editor')
self._actions_widget = ActionsWidget(project)
self._panels_widget = PanelsWidget(project)
layout = qtgui.QHBoxLayout()
splitter = qtgui.QSplitter(qtcore.Qt.Horizontal)
splitter.addWidget(self._panels_widget)
splitter.addWidget(self._actions_widget)
layout.addWidget(splitter)
self.setLayout(layout)
@property
def project(self):
return self._project
@project.setter
def project(self, value):
self._project = value
self._actions_widget.project = value
self._panels_widget.project = value
def sizeHint(self):
return qtcore.QSize(900, 800)<|fim▁end|>
|
self._project = value
|
<|file_name|>1____Serial_Device_Settings_Change_Check.py<|end_file_name|><|fim▁begin|>from ImageScripter import *<|fim▁hole|>
Viewer.Start()
Viewer.CloseAndClean()
Configurator.Start()
Configurator.inputoutput.Click()
addNewComDev(ComType = "Standard Connection",HardwareType = "Serial Port",Comport = '0')
addNewDevice(Configurator.genericserialdevices,"Generic Serial Device")
Configurator.system.Click()
Configurator.inputoutput.Click()
Configurator.serialport.Click()
Configurator.ComboBox.SelectAllWithIndex('1')
Configurator.apply.Click()
Configurator.changedsettings.Wait(seconds = 10)
Configurator.system.Click()
######################################Part 2
Configurator.inputoutput.Click()
for i in range(3):
Configurator.atatat2.RightClickTypeThenPress('aa','enter')
Add.PushButton.Click('OK')
Configurator.Edit.SetText(0,str(i))
Configurator.Edit.SetText(2,str(i))
Configurator.ComboBox.SelectAllWithIndex('1')
Configurator.apply.Click()
####################
Configurator.serialone.RightClickType('t')
Configurator.system.Click()
Configurator.RestartHard()
Configurator.Start()
Configurator.inputoutput.RealClick()
############################################TEST CODE
sleep(3)
Configurator.atatat2.RightClickType('d')
HlConfig.PushButton.Click('Yes')
Configurator.atatat2.WaitVanish()
Configurator.system.Click()
Configurator.Reset()<|fim▁end|>
|
from elan import *
raise ValueError('fff')
|
<|file_name|>requested_path.rs<|end_file_name|><|fim▁begin|>use iron::Request;
use std::path::{PathBuf, Path};
use std::fs::{self, Metadata};
use std::convert::AsRef;
pub struct RequestedPath {
pub path: PathBuf,
}
impl RequestedPath {
pub fn new<P: AsRef<Path>>(root_path: P, request: &Request) -> RequestedPath {
let mut path = root_path.as_ref().to_path_buf();
path.extend(&request.url.path);
RequestedPath { path: path }
}
pub fn should_redirect(&self, metadata: &Metadata, request: &Request) -> bool {
let last_url_element = request.url.path
.last()
.map(|s| s.as_ref());
// As per servo/rust-url/serialize_path, URLs ending in a slash have an
// empty string stored as the last component of their path. Rust-url
// even ensures that url.path is non-empty by appending a forward slash
// to URLs like http://example.com
// Some middleware may mutate the URL's path to violate this property,
// so the empty list case is handled as a redirect.
let has_trailing_slash = match last_url_element {
Some("") => true,
_ => false,
};
metadata.is_dir() && !has_trailing_slash
}
pub fn get_file(self, metadata: &Metadata) -> Option<PathBuf> {
if metadata.is_file() {
return Some(self.path);<|fim▁hole|>
match fs::metadata(&index_path) {
Ok(m) =>
if m.is_file() {
Some(index_path)
} else {
None
},
Err(_) => None,
}
}
}<|fim▁end|>
|
}
let index_path = self.path.join("index.html");
|
<|file_name|>live_audio_sample.py<|end_file_name|><|fim▁begin|>import numpy as np<|fim▁hole|>import pyaudio as pa
import wave
from time import sleep
#Constants used for sampling audio
CHUNK = 1024
FORMAT = pa.paInt16
CHANNELS = 1
RATE = 44100 # Must match rate at which mic actually samples sound
RECORD_TIMEFRAME = 1.0 #Time in seconds
OUTPUT_FILE = "sample.wav"
#Flag for plotting sound input waves for debugging and implementation purposes
TESTING_GRAPHS = True
def sampleAudio(wav_name=OUTPUT_FILE):
"""Samples audio from the microphone for a given period of time.
The output file is saved as [wav_name]
Code here taken from the front page of:
< https://people.csail.mit.edu/hubert/pyaudio/ > """
# Open the recording session
rec_session = pa.PyAudio()
stream = rec_session.open(format=FORMAT,
channels=CHANNELS,rate=RATE,input=True,frames_per_buffer=CHUNK)
print("Start recording")
frames = []
# Sample audio frames for given time period
for i in range(0, int(RATE/CHUNK*RECORD_TIMEFRAME)):
data = stream.read(CHUNK)
frames.append(data)
# Close the recording session
stream.stop_stream()
stream.close()
rec_session.terminate()
#Create the wav file for analysis
output_wav = wave.open(wav_name,"wb")
output_wav.setnchannels(CHANNELS)
output_wav.setsampwidth(rec_session.get_sample_size(FORMAT))
output_wav.setframerate(RATE)
output_wav.writeframes(b''.join(frames))
output_wav.close()
def getAvgFreq(wav_file=OUTPUT_FILE):
"""Analyzes the audio sample [wav_file] (must be a 16-bit WAV file with
one channel) and returns maximum magnitude of the most prominent sound
and the frequency thresholds it falls between.
Basic procedure of processing audio taken from:
< http://samcarcagno.altervista.org/blog/basic-sound-processing-python/ >"""
#Open wav file for analysis
sound_sample = wave.open(wav_file, "rb")
#Get sampling frequency
sample_freq = sound_sample.getframerate()
#Extract audio frames to be analyzed
# audio_frames = sound_sample.readframes(sound_sample.getnframes())
audio_frames = sound_sample.readframes(1024)
converted_val = []
#COnvert byte objects into frequency values per frame
for i in range(0,len(audio_frames),2):
if ord(audio_frames[i+1])>127:
converted_val.append(-(ord(audio_frames[i])+(256*(255-ord(audio_frames[i+1])))))
else:
converted_val.append(ord(audio_frames[i])+(256*ord(audio_frames[i+1])))
#Fit into numpy array for FFT analysis
freq_per_frame = np.array(converted_val)
# Get amplitude of soundwave section
freq = np.fft.fft(freq_per_frame)
amplitude = np.abs(freq)
amplitude = amplitude/float(len(freq_per_frame))
amplitude = amplitude**2
#Get bins/thresholds for frequencies
freqbins = np.fft.fftfreq(CHUNK,1.0/sample_freq)
x = np.linspace(0.0,1.0,1024)
# Plot data if need visualization
if(TESTING_GRAPHS):
#Plot raw data
plt.plot(converted_val)
plt.title("Raw Data")
plt.xlabel("Time (ms)")
plt.ylabel("Frequency (Hz)")
plt.show()
#Plot frequency histogram
plt.plot(freqbins[:16],amplitude[:16])
plt.title("Processed Data")
plt.xlabel("Frequency Bins")
plt.ylabel("Magnitude")
plt.show()
#Get the range that the max amplitude falls in. This represents the loudest noise
magnitude = np.amax(amplitude)
loudest = np.argmax(amplitude)
lower_thres = freqbins[loudest]
upper_thres = (freqbins[1]-freqbins[0])+lower_thres
#Close wav file
sound_sample.close()
#Return the magnitude of the sound wave and its frequency threshold for analysis
return magnitude, lower_thres, upper_thres
#Use for testing microphone input
if __name__ == "__main__":
# print("Wait 3 seconds to start...")
# sleep(3)
print("Recording!")
sampleAudio(OUTPUT_FILE)
print("Stop recording!")
print("Analyzing...")
mag, lower, upper = getAvgFreq(OUTPUT_FILE)
print("Magnitude is "+str(mag))
print("Lower bin threshold is "+str(lower))
print("Upper bin threshold is "+str(upper))<|fim▁end|>
|
import matplotlib.pyplot as plt #Used for graphing audio tests
|
<|file_name|>ProCamTransformer.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2009,2010,2011,2012 Samuel Audet
*
* This file is part of JavaCV.
*
* JavaCV is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 2 of the License, or
* (at your option) any later version (subject to the "Classpath" exception
* as provided in the LICENSE.txt file that accompanied this code).
*
* JavaCV is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JavaCV. If not, see <http://www.gnu.org/licenses/>.
*/
package com.googlecode.javacv;
import static com.googlecode.javacv.cpp.cvkernels.*;
import static com.googlecode.javacv.cpp.opencv_calib3d.*;
import static com.googlecode.javacv.cpp.opencv_core.*;
import static com.googlecode.javacv.cpp.opencv_imgproc.*;
/**
*
* @author Samuel Audet
*/
public class ProCamTransformer implements ImageTransformer {
public ProCamTransformer(double[] referencePoints,
CameraDevice camera, ProjectorDevice projector) {
this(referencePoints, camera, projector, null);
}
public ProCamTransformer(double[] referencePoints,
CameraDevice camera, ProjectorDevice projector, CvMat n) {
this.camera = camera;
this.projector = projector;
if (referencePoints != null) {
this.surfaceTransformer = new ProjectiveColorTransformer(
camera.cameraMatrix, camera.cameraMatrix, null, null, n,
referencePoints, null, null, 3, 0);
}
double[] referencePoints1 = { 0, 0, camera.imageWidth/2, camera.imageHeight, camera.imageWidth, 0 };
double[] referencePoints2 = { 0, 0, projector.imageWidth/2, projector.imageHeight, projector.imageWidth, 0 };
if (n != null) {
invCameraMatrix = CvMat.create(3, 3);
cvInvert(camera.cameraMatrix, invCameraMatrix);
JavaCV.perspectiveTransform(referencePoints2, referencePoints1,
invCameraMatrix, projector.cameraMatrix, projector.R, projector.T, n, true);
}
this.projectorTransformer = new ProjectiveColorTransformer(
camera.cameraMatrix, projector.cameraMatrix, projector.R, projector.T, null,
referencePoints1, referencePoints2, projector.colorMixingMatrix,
/*surfaceTransformer == null ? 3 : */1, 3);
// CvMat n2 = createParameters().getN();
if (referencePoints != null && n != null) {
frontoParallelH = camera.getFrontoParallelH(referencePoints, n, CvMat.create(3, 3));
invFrontoParallelH = frontoParallelH.clone();
cvInvert(frontoParallelH, invFrontoParallelH);
}
}
protected CameraDevice camera = null;
protected ProjectorDevice projector = null;
protected ProjectiveColorTransformer surfaceTransformer = null;
protected ProjectiveColorTransformer projectorTransformer = null;
protected IplImage[] projectorImage = null, surfaceImage = null;
protected CvScalar fillColor = cvScalar(0.0, 0.0, 0.0, 1.0);
protected CvRect roi = new CvRect();
protected CvMat frontoParallelH = null, invFrontoParallelH = null;
protected CvMat invCameraMatrix = null;
protected KernelData kernelData = null;
protected CvMat[] H1 = null;
protected CvMat[] H2 = null;
protected CvMat[] X = null;
public int getNumGains() {
return projectorTransformer.getNumGains();
}
public int getNumBiases() {
return projectorTransformer.getNumBiases();<|fim▁hole|>
public CvScalar getFillColor() {
return fillColor;
}
public void setFillColor(CvScalar fillColor) {
this.fillColor = fillColor;
}
public ProjectiveColorTransformer getSurfaceTransformer() {
return surfaceTransformer;
}
public ProjectiveColorTransformer getProjectorTransformer() {
return projectorTransformer;
}
public IplImage getProjectorImage(int pyramidLevel) {
return projectorImage[pyramidLevel];
}
public void setProjectorImage(IplImage projectorImage0, int minLevel, int maxLevel) {
setProjectorImage(projectorImage0, minLevel, maxLevel, true);
}
/**
* Stores the projector image and builds its Gaussian pyramid for levels
* minLevel..maxLevel, where each level is half the size of the previous one.
*
* @param projectorImage0 source image used for pyramid level minLevel
* @param minLevel first pyramid level to fill
* @param maxLevel last pyramid level to fill
* @param convertToFloat when true and the input is not already 32-bit float,
*        the input is copied into an internally managed float image scaled by
*        1/255; otherwise the input image is referenced as-is
*/
public void setProjectorImage(IplImage projectorImage0, int minLevel, int maxLevel, boolean convertToFloat) {
// (Re)allocate the pyramid array if the requested depth changed.
if (projectorImage == null || projectorImage.length != maxLevel+1) {
projectorImage = new IplImage[maxLevel+1];
}
if (projectorImage0.depth() == IPL_DEPTH_32F || !convertToFloat) {
// Already float (or caller opted out of conversion): keep a reference.
projectorImage[minLevel] = projectorImage0;
} else {
// Lazily allocate the float working image for the base level.
if (projectorImage[minLevel] == null) {
projectorImage[minLevel] = IplImage.create(projectorImage0.width(), projectorImage0.height(),
IPL_DEPTH_32F, projectorImage0.nChannels(), projectorImage0.origin());
}
IplROI ir = projectorImage0.roi();
if (ir != null) {
// Expand the ROI outward to a multiple of 2^(maxLevel+1), clamped to the
// image bounds, so every pyramid level gets a cleanly halvable ROI.
int align = 1<<(maxLevel+1);
roi.x(Math.max(0, (int)Math.floor((double)ir.xOffset()/align)*align));
roi.y(Math.max(0, (int)Math.floor((double)ir.yOffset()/align)*align));
roi.width (Math.min(projectorImage0.width(), (int)Math.ceil((double)ir.width() /align)*align));
roi.height(Math.min(projectorImage0.height(), (int)Math.ceil((double)ir.height()/align)*align));
cvSetImageROI(projectorImage0, roi);
cvSetImageROI(projectorImage[minLevel], roi);
} else {
cvResetImageROI(projectorImage0);
cvResetImageROI(projectorImage[minLevel]);
}
// Convert 8-bit [0,255] input to float [0,1].
cvConvertScale(projectorImage0, projectorImage[minLevel], 1.0/255.0, 0);
}
// CvScalar.ByValue average = cvAvg(projectorImage[0], null);
// cvSubS(projectorImage[0], average, projectorImage[0], null);
// Build the remaining levels by repeated 2x Gaussian downsampling.
for (int i = minLevel+1; i <= maxLevel; i++) {
int w = projectorImage[i-1].width()/2;
int h = projectorImage[i-1].height()/2;
int d = projectorImage[i-1].depth();
int c = projectorImage[i-1].nChannels();
int o = projectorImage[i-1].origin();
if (projectorImage[i] == null) {
projectorImage[i] = IplImage.create(w, h, d, c, o);
}
IplROI ir = projectorImage[i-1].roi();
if (ir != null) {
// Halve the parent's ROI for this level.
roi.x(ir.xOffset()/2); roi.width (ir.width() /2);
roi.y(ir.yOffset()/2); roi.height(ir.height()/2);
cvSetImageROI(projectorImage[i], roi);
} else {
cvResetImageROI(projectorImage[i]);
}
cvPyrDown(projectorImage[i-1], projectorImage[i], CV_GAUSSIAN_5x5);
// Leave the parent level without an ROI once its child is built.
cvResetImageROI(projectorImage[i-1]);
}
}
public IplImage getSurfaceImage(int pyramidLevel) {
return surfaceImage[pyramidLevel];
}
/**
* Stores the surface (template) image and builds its Gaussian pyramid with
* the given number of levels; level 0 references the input image directly.
*
* @param surfaceImage0 source image used as pyramid level 0 (its ROI is reset)
* @param pyramidLevels total number of pyramid levels to keep
*/
public void setSurfaceImage(IplImage surfaceImage0, int pyramidLevels) {
// (Re)allocate the pyramid array if the requested depth changed.
if (surfaceImage == null || surfaceImage.length != pyramidLevels) {
surfaceImage = new IplImage[pyramidLevels];
}
surfaceImage[0] = surfaceImage0;
cvResetImageROI(surfaceImage0);
// Build the remaining levels by repeated 2x Gaussian downsampling.
for (int i = 1; i < pyramidLevels; i++) {
int w = surfaceImage[i-1].width()/2;
int h = surfaceImage[i-1].height()/2;
int d = surfaceImage[i-1].depth();
int c = surfaceImage[i-1].nChannels();
int o = surfaceImage[i-1].origin();
if (surfaceImage[i] == null) {
surfaceImage[i] = IplImage.create(w, h, d, c, o);
} else {
cvResetImageROI(surfaceImage[i]);
}
cvPyrDown(surfaceImage[i-1], surfaceImage[i], CV_GAUSSIAN_5x5);
}
}
/**
* Fills the per-call warp and color matrices used by the transform kernel.
*
* Inverts the surface homography into H1 (only when a surfaceTransformer is
* configured) and the projector homography into H2, rescales both for the
* given pyramid level, and builds the 4x4 color matrix X from the projector's
* 3x3 color mixing matrix scaled by the gain a[0], with biases a[1..3] in the
* last column and a homogeneous bottom row.
*
* @param H1 output: inverse surface homography (may be null when there is no
*        surfaceTransformer; it is left untouched in that case)
* @param H2 output: inverse projector homography
* @param X output: 4x4 color transform combining gain, mixing matrix and biases
* @param pyramidLevel pyramid level the matrices will be applied at
* @param p parameter set supplying the homographies and color parameters
*/
protected void prepareTransforms(CvMat H1, CvMat H2, CvMat X, int pyramidLevel, Parameters p) {
ProjectiveColorTransformer.Parameters cameraParameters = p.getSurfaceParameters();
ProjectiveColorTransformer.Parameters projectorParameters = p.getProjectorParameters();
if (surfaceTransformer != null) {
cvInvert(cameraParameters.getH(), H1);
}
cvInvert(projectorParameters.getH(), H2);
// adjust the scale of the transformation based on the pyramid level
// Elements 2 and 5 are divided by 2^level and elements 6 and 7 multiplied
// by it (presumably the translation and perspective terms of a row-major
// 3x3 homography — confirm the storage order if modifying this).
if (pyramidLevel > 0) {
int scale = 1<<pyramidLevel;
if (surfaceTransformer != null) {
H1.put(2, H1.get(2)/scale);
H1.put(5, H1.get(5)/scale);
H1.put(6, H1.get(6)*scale);
H1.put(7, H1.get(7)*scale);
}
H2.put(2, H2.get(2)/scale);
H2.put(5, H2.get(5)/scale);
H2.put(6, H2.get(6)*scale);
H2.put(7, H2.get(7)*scale);
}
// Assemble X = [ a0*M | b ; 0 0 0 1 ] where M is the 3x3 color mixing
// matrix, a0 = a[0] is the gain and b = a[1..3] are the channel biases.
double[] x = projector.colorMixingMatrix.get();
double[] a = projectorParameters.getColorParameters();
double a2 = a[0];
X.put(a2*x[0], a2*x[1], a2*x[2], a[1],
a2*x[3], a2*x[4], a2*x[5], a[2],
a2*x[6], a2*x[7], a2*x[8], a[3],
0, 0, 0, 1);
}
/**
* Warps the source (surface) image and the projector image at the given
* pyramid level and combines them into dstImage by per-pixel multiplication.
* Only the forward direction is supported; inverse requests throw.
*
* @param srcImage input surface image to warp (used when a surfaceTransformer exists)
* @param dstImage output image; also receives the warped projector image
* @param roi region of interest applied to the temporary image, or null for the full image
* @param pyramidLevel pyramid level selecting the pre-built projector image
* @param parameters must be a ProCamTransformer.Parameters instance
* @param inverse must be false; inverse transforms are not supported
* @throws UnsupportedOperationException if inverse is true
*/
public void transform(final IplImage srcImage, final IplImage dstImage, final CvRect roi,
final int pyramidLevel, final ImageTransformer.Parameters parameters, final boolean inverse) {
if (inverse) {
throw new UnsupportedOperationException("Inverse transform not supported.");
}
final Parameters p = ((Parameters)parameters);
final ProjectiveTransformer.Parameters cameraParameters = p.getSurfaceParameters();
final ProjectiveTransformer.Parameters projectorParameters = p.getProjectorParameters();
// Grow the per-parameters scratch image array up to this pyramid level.
if (p.tempImage == null || p.tempImage.length <= pyramidLevel) {
p.tempImage = new IplImage[pyramidLevel+1];
}
p.tempImage[pyramidLevel] = IplImage.createIfNotCompatible(p.tempImage[pyramidLevel], dstImage);
if (roi == null) {
cvResetImageROI(p.tempImage[pyramidLevel]);
} else {
cvSetImageROI(p.tempImage[pyramidLevel], roi);
}
// Parallel.run(new Runnable() { public void run() {
// warp the template image
if (surfaceTransformer != null) {
surfaceTransformer.transform(srcImage, p.tempImage[pyramidLevel], roi, pyramidLevel, cameraParameters, false);
}
// }}, new Runnable() { public void run() {
// warp the projector image
projectorTransformer.transform(projectorImage[pyramidLevel], dstImage, roi, pyramidLevel, projectorParameters, false);
// }});
// multiply projector image with template image
if (surfaceTransformer != null) {
// Scale by 1/highValue so the product stays in the image's value range.
cvMul(dstImage, p.tempImage[pyramidLevel], dstImage, 1/dstImage.highValue());
} else {
// NOTE(review): in this branch tempImage was not written by any warp
// above, yet it overwrites the freshly warped projector image in
// dstImage — verify this is the intended behavior.
cvCopy(p.tempImage[pyramidLevel], dstImage);
}
}
public void transform(CvMat srcPts, CvMat dstPts, ImageTransformer.Parameters parameters, boolean inverse) {
if (surfaceTransformer != null) {
surfaceTransformer.transform(srcPts, dstPts, ((Parameters)parameters).surfaceParameters, inverse);
} else if (dstPts != srcPts) {
dstPts.put(srcPts);
}
}
/**
* Batch variant: fills a KernelData record per Data entry, runs the
* multi-warp color transform over all of them, and copies the resulting
* pixel counts and dot products back into the Data objects.
*
* @param data per-entry inputs/outputs; data.length must equal parameters.length
* @param roi region of interest passed through to the kernel, or null
* @param parameters per-entry ProCamTransformer.Parameters instances
* @param inverses must be null or all false; inverse transforms are not supported
* @throws UnsupportedOperationException if any inverses[i] is true
*/
public void transform(Data[] data, CvRect roi, ImageTransformer.Parameters[] parameters, boolean[] inverses) {
assert data.length == parameters.length;
// Lazily grow the kernel record and the per-entry matrix pools; existing
// capacity is reused across calls.
if (kernelData == null || kernelData.capacity() < data.length) {
kernelData = new KernelData(data.length);
}
if ((H1 == null || H1.length < data.length) && surfaceTransformer != null) {
H1 = new CvMat[data.length];
for (int i = 0; i < H1.length; i++) {
H1[i] = CvMat.create(3, 3);
}
}
if (H2 == null || H2.length < data.length) {
H2 = new CvMat[data.length];
for (int i = 0; i < H2.length; i++) {
H2[i] = CvMat.create(3, 3);
}
}
if (X == null || X.length < data.length) {
X = new CvMat[data.length];
for (int i = 0; i < X.length; i++) {
X[i] = CvMat.create(4, 4);
}
}
// Populate one kernel record per entry.
for (int i = 0; i < data.length; i++) {
kernelData.position(i);
kernelData.srcImg(projectorImage[data[i].pyramidLevel]);
kernelData.srcImg2(surfaceTransformer == null ? null : data[i].srcImg);
kernelData.subImg(data[i].subImg);
kernelData.srcDotImg(data[i].srcDotImg);
kernelData.mask(data[i].mask);
kernelData.zeroThreshold(data[i].zeroThreshold);
kernelData.outlierThreshold(data[i].outlierThreshold);
if (inverses != null && inverses[i]) {
throw new UnsupportedOperationException("Inverse transform not supported.");
}
prepareTransforms(surfaceTransformer == null ? null : H1[i],
H2[i], X[i], data[i].pyramidLevel, (Parameters)parameters[i]);
// NOTE: the kernel's H1 slot receives the projector homography (H2)
// and its H2 slot the surface homography (H1) — the naming differs
// between this class and KernelData; confirm before changing.
kernelData.H1(H2[i]);
kernelData.H2(surfaceTransformer == null ? null : H1[i]);
kernelData.X (X [i]);
kernelData.transImg(data[i].transImg);
kernelData.dstImg(data[i].dstImg);
kernelData.dstDstDot(data[i].dstDstDot);
}
// Temporarily shrink the advertised capacity to the batch size for the
// kernel call, then restore it so the buffer can be reused later.
int fullCapacity = kernelData.capacity();
kernelData.capacity(data.length);
multiWarpColorTransform(kernelData, roi, getFillColor());
kernelData.capacity(fullCapacity);
// Copy the kernel's per-entry results back to the caller's Data objects.
for (int i = 0; i < data.length; i++) {
kernelData.position(i);
data[i].dstCount = kernelData.dstCount();
data[i].dstCountZero = kernelData.dstCountZero();
data[i].dstCountOutlier = kernelData.dstCountOutlier();
data[i].srcDstDot = kernelData.srcDstDot();
}
// if (data[0].dstCountZero > 0) {
// System.err.println(data[0].dstCountZero + " out of " + data[0].dstCount
// + " are zero = " + 100*data[0].dstCountZero/data[0].dstCount + "%");
// }
}
public Parameters createParameters() {
return new Parameters();
}
public class Parameters implements ImageTransformer.Parameters {
protected Parameters() {
reset(false);
}
protected Parameters(ProjectiveColorTransformer.Parameters surfaceParameters,
ProjectiveColorTransformer.Parameters projectorParameters) {
reset(surfaceParameters, projectorParameters);
}
private ProjectiveColorTransformer.Parameters surfaceParameters = null;
private ProjectiveColorTransformer.Parameters projectorParameters = null;
private IplImage[] tempImage = null;
private CvMat H = CvMat.create(3, 3), R = CvMat.create(3, 3),
n = CvMat.create(3, 1), t = CvMat.create(3, 1);
public ProjectiveColorTransformer.Parameters getSurfaceParameters() {
return surfaceParameters;
}
public ProjectiveColorTransformer.Parameters getProjectorParameters() {
return projectorParameters;
}
private int getSizeForSurface() {
return surfaceTransformer == null ? 0 : surfaceParameters.size() -
surfaceTransformer.getNumGains() - surfaceTransformer.getNumBiases();
}
private int getSizeForProjector() {
return projectorParameters.size();
}
public int size() {
return getSizeForSurface() + getSizeForProjector();
}
public double[] get() {
double[] p = new double[size()];
for (int i = 0; i < p.length; i++) {
p[i] = get(i);
}
return p;
}
public double get(int i) {
if (i < getSizeForSurface()) {
return surfaceParameters.get(i);
} else {
return projectorParameters.get(i-getSizeForSurface());
}
}
public void set(double ... p) {
for (int i = 0; i < p.length; i++) {
set(i, p[i]);
}
}
public void set(int i, double p) {
if (i < getSizeForSurface()) {
surfaceParameters.set(i, p);
} else {
projectorParameters.set(i-getSizeForSurface(), p);
}
}
public void set(ImageTransformer.Parameters p) {
Parameters pcp = (Parameters)p;
if (surfaceTransformer != null) {
surfaceParameters.set(pcp.getSurfaceParameters());
surfaceParameters.resetColor(false);
}
projectorParameters.set(pcp.getProjectorParameters());
}
public void reset(boolean asIdentity) {
reset(null, null);
}
public void reset(ProjectiveColorTransformer.Parameters surfaceParameters,
ProjectiveColorTransformer.Parameters projectorParameters) {
if (surfaceParameters == null && surfaceTransformer != null) {
surfaceParameters = surfaceTransformer.createParameters();
}
if (projectorParameters == null) {
projectorParameters = projectorTransformer.createParameters();
}
this.surfaceParameters = surfaceParameters;
this.projectorParameters = projectorParameters;
setSubspace(getSubspace());
}
// public boolean addDelta(int i) {
// return addDelta(i, 1);
// }
// public boolean addDelta(int i, double scale) {
// // gradient varies linearly with intensity, so
// // the increment value is not very important, but
// // referenceCameraImage is good only for the value 1,
// // so let's use that
// if (i < getSizeForSurface()) {
// surfaceParameters.addDelta(i, scale);
// projectorParameters.setUpdateNeeded(true);
// } else {
// projectorParameters.addDelta(i-getSizeForSurface(), scale);
// }
//
// return false;
// }
public double getConstraintError() {
double error = surfaceTransformer == null ? 0 : surfaceParameters.getConstraintError();
projectorParameters.update();
return error;
}
public void compose(ImageTransformer.Parameters p1, boolean inverse1,
ImageTransformer.Parameters p2, boolean inverse2) {
throw new UnsupportedOperationException("Compose operation not supported.");
}
public boolean preoptimize() {
double[] p = setSubspaceInternal(getSubspaceInternal());
if (p != null) {
set(8, p[8]);
set(9, p[9]);
set(10, p[10]);
return true;
}
return false;
}
public void setSubspace(double ... p) {
double[] dst = setSubspaceInternal(p);
if (dst != null) {
set(dst);
}
}
public double[] getSubspace() {
return getSubspaceInternal();
}
private double[] setSubspaceInternal(double ... p) {
if (invFrontoParallelH == null) {
return null;
}
double[] dst = new double[8+3];
t.put(p[0], p[1], p[2]);
cvRodrigues2(t, R, null);
t.put(p[3], p[4], p[5]);
// compute new H
H.put(R.get(0), R.get(1), t.get(0),
R.get(3), R.get(4), t.get(1),
R.get(6), R.get(7), t.get(2));
cvMatMul(H, invFrontoParallelH, H);
cvMatMul(surfaceTransformer.getK2(), H, H);
cvMatMul(H, surfaceTransformer.getInvK1(), H);
// compute new n, rotation from the z-axis
cvGEMM(R, t, 1, null, 0, t, CV_GEMM_A_T);
double scale = 1/t.get(2);
n.put(0.0, 0.0, 1.0);
cvGEMM(R, n, scale, null, 0, n, 0);
// compute and set new three points
double[] src = projectorTransformer.getReferencePoints2();
JavaCV.perspectiveTransform(src, dst,
projectorTransformer.getInvK1(),projectorTransformer.getK2(),
projectorTransformer.getR(), projectorTransformer.getT(), n, true);
dst[8] = dst[0];
dst[9] = dst[2];
dst[10] = dst[4];
// compute and set new four points
JavaCV.perspectiveTransform(surfaceTransformer.getReferencePoints1(), dst, H);
return dst;
}
private double[] getSubspaceInternal() {
if (frontoParallelH == null) {
return null;
}
cvMatMul(surfaceTransformer.getK1(), frontoParallelH, H);
cvMatMul(surfaceParameters .getH(), H, H);
cvMatMul(surfaceTransformer.getInvK2(), H, H);
JavaCV.HtoRt(H, R, t);
cvRodrigues2(R, n, null);
double[] p = { n.get(0), n.get(1), n.get(2),
t.get(0), t.get(1), t.get(2) };
return p;
}
public CvMat getN() {
double[] src = projectorTransformer.getReferencePoints2();
double[] dst = projectorTransformer.getReferencePoints1().clone();
dst[0] = projectorParameters.get(0);
dst[2] = projectorParameters.get(1);
dst[4] = projectorParameters.get(2);
// get plane parameters n, but since we model the target to be
// the camera, we have to inverse everything before calling
// getPlaneParameters() and reframe the n it returns
cvTranspose(projectorTransformer.getR(), R);
cvGEMM(R, projectorTransformer.getT(), -1, null, 0, t, 0);
JavaCV.getPlaneParameters(src, dst, projectorTransformer.getInvK2(),
projectorTransformer.getK1(), R, t, n);
double d = 1 + cvDotProduct(n, projectorTransformer.getT());
cvGEMM(R, n, 1/d, null, 0, n, 0);
return n;
}
public CvMat getN0() {
n = getN();
if (surfaceTransformer == null) {
return n;
}
// remove projective effect of the current n,
// leaving only the effect of n0
camera.getFrontoParallelH(surfaceParameters.get(), n, R);
cvInvert(surfaceParameters.getH(), H);
cvMatMul(H, surfaceTransformer.getK2(), H);
cvMatMul(H, R, H);
cvMatMul(surfaceTransformer.getInvK1(), H, H);
JavaCV.HtoRt(H, R, t);
// compute n0, as a rotation from the z-axis
cvGEMM(R, t, 1, null, 0, t, CV_GEMM_A_T);
double scale = 1/t.get(2);
n.put(0.0, 0.0, 1.0);
cvGEMM(R, n, scale, null, 0, n, 0);
return n;
}
@Override public Parameters clone() {
Parameters p = new Parameters();
p.surfaceParameters = surfaceParameters == null ? null : surfaceParameters.clone();
p.projectorParameters = projectorParameters.clone();
return p;
}
@Override public String toString() {
if (surfaceParameters != null) {
return surfaceParameters.toString() + projectorParameters.toString();
} else {
return projectorParameters.toString();
}
}
}
}<|fim▁end|>
|
}
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.ZoomOut = exports.ZoomIn = exports.SlideRight = exports.SlideLeft = undefined;
var _slideLeft = require('./slide-left.scss');
var _slideLeft2 = _interopRequireDefault(_slideLeft);
var _slideRight = require('./slide-right.scss');
var _slideRight2 = _interopRequireDefault(_slideRight);
<|fim▁hole|>
var _zoomOut = require('./zoom-out.scss');
var _zoomOut2 = _interopRequireDefault(_zoomOut);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
exports.SlideLeft = _slideLeft2.default;
exports.SlideRight = _slideRight2.default;
exports.ZoomIn = _zoomIn2.default;
exports.ZoomOut = _zoomOut2.default;<|fim▁end|>
|
var _zoomIn = require('./zoom-in.scss');
var _zoomIn2 = _interopRequireDefault(_zoomIn);
|
<|file_name|>mapsourceesri.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="uk" version="2.0">
<context>
<name>MapSourceEsri</name>
<message>
<location filename="../../plugins/mapsourceesri/mapsourceesri.cpp" line="6"/>
<source>Esri map source</source>
<translation>Джерело мапи Esri</translation>
</message>
<message><|fim▁hole|> <location filename="../../plugins/mapsourceesri/mapsourceesri.cpp" line="7"/>
<source>Allows Map plugin to use Esri service as map source</source>
<translation>Дозвляє плагіну мапи використовувати сервіс Esri як джерело мапи</translation>
</message>
</context>
</TS><|fim▁end|>
| |
<|file_name|>RemoveAndGreedyFillScript.java<|end_file_name|><|fim▁begin|>package hmod.domain.mkp.scripts;
import flexbuilders.core.BuildException;
import flexbuilders.core.Buildable;
import flexbuilders.scripting.BuildScript;
import flexbuilders.tree.BranchBuilder;
import flexbuilders.tree.TreeHandler;
import static hmod.parser.builders.AlgorithmBuilders.*;
/**
*
* @author Enrique Urra C.
*/
public class RemoveAndGreedyFillScript extends BuildScript
{
private BranchBuilder callLoad, callMultiRemove, callGreedyFill, callSave;
private Buildable loadStart, multiRemoveStart, greedyFillStart, saveStart;
public RemoveAndGreedyFillScript(TreeHandler input) throws BuildException
{
super(input);
callLoad = branch(MKPIds.MKP_HEURISTIC_REMOVE_AND_GREEDY_FILL);
callMultiRemove = branch();
callGreedyFill = branch();
callSave = branch();
loadStart = ref(MKPIds.MKP_LOAD_SOLUTION);
multiRemoveStart = ref(MKPIds.MKP_OPERATION_MULTI_REMOVE);
greedyFillStart = ref(MKPIds.MKP_OPERATION_GREEDY_FILL);
saveStart = ref(MKPIds.MKP_SAVE_SOLUTION);
}
@Override
public void process() throws BuildException
{
callLoad.setBuildable(
subProcessStep().setNextStep(callMultiRemove).
setSubStep(loadStart)
);
callMultiRemove.setBuildable(
subProcessStep().setNextStep(callGreedyFill).
setSubStep(multiRemoveStart)
);
callGreedyFill.setBuildable(
subProcessStep().setNextStep(callSave).<|fim▁hole|>
callSave.setBuildable(
subProcessStep().
setSubStep(saveStart)
);
}
}<|fim▁end|>
|
setSubStep(greedyFillStart)
);
|
<|file_name|>webpack.exercise.js<|end_file_name|><|fim▁begin|>var webpack = require("webpack"),
HtmlWebpackPlugin = require("html-webpack-plugin"),
ExtractTextPlugin = require("extract-text-webpack-plugin"),
CopyWebpackPlugin = require("copy-webpack-plugin"),
helpers = require("./helpers");
const exercisePath = process.env.exercise;
var plugins = [
new webpack.optimize.CommonsChunkPlugin({
name: ["app", "vendor", "polyfills"]
}),
new ExtractTextPlugin("[name].css"),
new HtmlWebpackPlugin({
template: "index.html"
})
];
if (exercisePath === 'localization') {
plugins.push(
new CopyWebpackPlugin([{
from: "i18n", to: "i18n"
}])
)
}
module.exports = {
context: helpers.root() + '/' + exercisePath + "/src",
entry: {
app: "./main.ts",
vendor: helpers.root() + "/common/vendor.ts",
polyfills: helpers.root() + "/common/polyfills.ts"
},
resolve: {
extensions: [".webpack.js", ".web.js", ".ts", ".js"]
},
module: {
exprContextCritical: false,
loaders: [
{
test: /\.ts$/,
loaders: ["ts-loader", "angular2-router-loader?debug=true"]
},
{
test: /\.html$/,
loader: "html-loader"
},
{
test: /\.(png|jpe?g|gif|svg|woff|woff2|ttf|eot|ico)$/,
loader: "file?name=assets/[name].[hash].[ext]"
},
{
test: /\.css$/,
exclude: helpers.root("src", "app"),
loader: ExtractTextPlugin.extract({ fallback: 'style-loader', use: 'css-loader' })
},
{
test: /\.css$/,
include: helpers.root("src", "app"),
loader: "raw-loader"
},
{
test: /\.s(a|c)ss$/,
loaders: ["raw-loader", "sass-loader"]
}
]
},
plugins: plugins,
devtool: "source-map",<|fim▁hole|>
output: {
path: helpers.root("dist"),
publicPath: "http://localhost:8080/",
filename: "[name].js",
chunkFilename: "[id].chunk.js"
},
devServer: {
historyApiFallback: {
index: "http://localhost:8080/index.html"
}
}
}<|fim▁end|>
| |
<|file_name|>syntax_iterators.py<|end_file_name|><|fim▁begin|>from typing import Union, Iterator
from ...symbols import NOUN, PROPN, PRON
from ...errors import Errors
from ...tokens import Doc, Span
def noun_chunks(doclike: Union[Doc, Span]) -> Iterator[Span]:
"""
Detect base noun phrases from a dependency parse. Works on both Doc and Span.
"""
# fmt: off
labels = ["nsubj", "nsubj:pass", "obj", "iobj", "ROOT", "appos", "nmod", "nmod:poss"]
# fmt: on
doc = doclike.doc # Ensure works on both Doc and Span.
if not doc.has_annotation("DEP"):
raise ValueError(Errors.E029)
np_deps = [doc.vocab.strings[label] for label in labels]
conj = doc.vocab.strings.add("conj")
np_label = doc.vocab.strings.add("NP")
prev_end = -1
for i, word in enumerate(doclike):
if word.pos not in (NOUN, PROPN, PRON):
continue<|fim▁hole|> continue
if word.dep in np_deps:
prev_end = word.right_edge.i
yield word.left_edge.i, word.right_edge.i + 1, np_label
elif word.dep == conj:
head = word.head
while head.dep == conj and head.head.i < head.i:
head = head.head
# If the head is an NP, and we're coordinated to it, we're an NP
if head.dep in np_deps:
prev_end = word.right_edge.i
yield word.left_edge.i, word.right_edge.i + 1, np_label
SYNTAX_ITERATORS = {"noun_chunks": noun_chunks}<|fim▁end|>
|
# Prevent nested chunks from being produced
if word.left_edge.i <= prev_end:
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url
from django.contrib import admin
from rest_framework import routers
from courts import views
router = routers.DefaultRouter()
router.register(r'courts', views.CourtsViewSet)
<|fim▁hole|> url(r'^admin/', include(admin.site.urls)),
)<|fim▁end|>
|
urlpatterns = patterns('',
url(r'^api/v1/', include(router.urls)),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
|
<|file_name|>index.js<|end_file_name|><|fim▁begin|><|fim▁hole|>'use strict';
/**
* Rounds a number to a fixed number of decimal places.
*
* @param {number} value - the number to be rounded
* @param {number} places - the number of decimal places to round to
* @param {string} [roundMethod] - the rounding method: 'floor' (or 'int'), 'ceiling', or 'round' (default)
* @returns {number} the number rounded to the given decimal places
*/
function roundto(value, places, roundMethod) {
  // Default to nearest-value rounding when no method is supplied.
  var method = typeof roundMethod !== 'undefined' ? roundMethod : 'round';
  // Shift the requested decimal places into the integer range.
  var factor = Math.pow(10, places);
  var scaled = value * factor;
  var rounded;
  if (method === 'floor' || method === 'int') {
    rounded = Math.floor(scaled);
  } else if (method === 'ceiling') {
    rounded = Math.ceil(scaled);
  } else {
    // Any unrecognized method falls back to standard rounding.
    rounded = Math.round(scaled);
  }
  // Shift back to restore the decimal places.
  return rounded / factor;
}
exports = module.exports = roundto;<|fim▁end|>
| |
<|file_name|>MineralArmorInfo.java<|end_file_name|><|fim▁begin|>/* This file is part of Arkhados.
Arkhados is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Arkhados is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Arkhados. If not, see <http://www.gnu.org/licenses/>. */
package arkhados.spell.buffs.info;
import arkhados.controls.CRotation;
import arkhados.controls.CTrackLocation;
import arkhados.effects.BuffEffect;
import com.jme3.math.Vector3f;
import com.jme3.scene.Node;
import com.jme3.scene.Spatial;
public class MineralArmorInfo extends BuffInfo {
{
setIconPath("Interface/Images/SpellIcons/MineralArmor.png");
}
@Override
public BuffEffect createBuffEffect(BuffInfoParameters params) {
MineralArmorEffect effect = new MineralArmorEffect(params.duration);<|fim▁hole|>
class MineralArmorEffect extends BuffEffect {
private Node centralNode = null;
public MineralArmorEffect(float timeLeft) {
super(timeLeft);
}
public void addToCharacter(BuffInfoParameters params) {
Node character = (Node) params.buffControl.getSpatial();
Spatial crystals1 = assets.loadModel("Models/crystals.j3o");
Spatial crystals2 = assets.loadModel("Models/crystals.j3o");
Spatial crystals3 = assets.loadModel("Models/crystals.j3o");
Spatial crystals4 = assets.loadModel("Models/crystals.j3o");
centralNode = new Node("mineral-armor-node");
centralNode.attachChild(crystals1);
centralNode.attachChild(crystals2);
centralNode.attachChild(crystals3);
centralNode.attachChild(crystals4);
crystals1.setLocalTranslation(-7.5f, 0f, 0f);
crystals2.setLocalTranslation(7.5f, 0f, 0f);
crystals3.setLocalTranslation(0f, 0f, -7.5f);
crystals4.setLocalTranslation(0f, 0f, 7.5f);
Node world = character.getParent();
world.attachChild(centralNode);
centralNode.addControl(
new CTrackLocation(character, new Vector3f(0f, 10f, 0f)));
centralNode.addControl(new CRotation(0f, 2f, 0f));
}
@Override
public void destroy() {
super.destroy();
centralNode.removeFromParent();
}
}<|fim▁end|>
|
effect.addToCharacter(params);
return effect;
}
}
|
<|file_name|>LanguageDialog.ts<|end_file_name|><|fim▁begin|>namespace sharp.Serene.Administration {
@Serenity.Decorators.registerClass()
export class LanguageDialog extends Serenity.EntityDialog<LanguageRow, any> {
protected getFormKey() { return LanguageForm.formKey; }
protected getIdProperty() { return LanguageRow.idProperty; }
protected getLocalTextPrefix() { return LanguageRow.localTextPrefix; }
protected getNameProperty() { return LanguageRow.nameProperty; }
protected getService() { return LanguageService.baseUrl; }
protected form = new LanguageForm(this.idPrefix);
}<|fim▁hole|><|fim▁end|>
|
}
|
<|file_name|>android.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Android ABI-compatibility module
//!
//! The ABI of Android has changed quite a bit over time, and libstd attempts to
//! be both forwards and backwards compatible as much as possible. We want to
//! always work with the most recent version of Android, but we also want to
//! work with older versions of Android for whenever projects need to.
//!
//! Our current minimum supported Android version is `android-9`, e.g., Android
//! with API level 9. We then in theory want to work on that and all future
//! versions of Android!
//!
//! Some of the detection here is done at runtime via `dlopen` and
//! introspection. Other times no detection is performed at all and we just
//! provide a fallback implementation as some versions of Android we support
//! don't have the function.
//!
//! You'll find more details below about why each compatibility shim is needed.
#![cfg(target_os = "android")]
use libc::{c_int, c_void, sighandler_t, size_t, ssize_t};
use libc::{ftruncate, pread, pwrite};
use io;
use super::{cvt, cvt_r};
// The `log2` and `log2f` functions apparently appeared in android-18, or at
// least you can see they're not present in the android-17 header [1] and they
// are present in android-18 [2].
//
// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms
// /android-17/arch-arm/usr/include/math.h
// [2]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms
// /android-18/arch-arm/usr/include/math.h
//
// Note that these shims are likely less precise than directly calling `log2`,
// but hopefully that should be enough for now...
//
// Note that mathematically, for any arbitrary `y`:
//
// log_2(x) = log_y(x) / log_y(2)
// = log_y(x) / (1 / log_2(y))
// = log_y(x) * log_2(y)
//
// Hence because `ln` (log_e) is available on all Android we just choose `y = e`
// and get:
//
// log_2(x) = ln(x) * log_2(e)
#[cfg(not(test))]
pub fn log2f32(f: f32) -> f32 {
f.ln() * ::f32::consts::LOG2_E
}
#[cfg(not(test))]
pub fn log2f64(f: f64) -> f64 {
f.ln() * ::f64::consts::LOG2_E
}
// Back in the day [1] the `signal` function was just an inline wrapper
// around `bsd_signal`, but starting in API level android-20 the `signal`
// symbols was introduced [2]. Finally, in android-21 the API `bsd_signal` was
// removed [3].
//
// Basically this means that if we want to be binary compatible with multiple
// Android releases (oldest being 9 and newest being 21) then we need to check
// for both symbols and not actually link against either.
//<|fim▁hole|>// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms
// /android-18/arch-arm/usr/include/signal.h
// [2]: https://chromium.googlesource.com/android_tools/+/fbd420/ndk_experimental
// /platforms/android-20/arch-arm
// /usr/include/signal.h
// [3]: https://chromium.googlesource.com/android_tools/+/20ee6d/ndk/platforms
// /android-21/arch-arm/usr/include/signal.h
pub unsafe fn signal(signum: c_int, handler: sighandler_t) -> sighandler_t {
weak!(fn signal(c_int, sighandler_t) -> sighandler_t);
weak!(fn bsd_signal(c_int, sighandler_t) -> sighandler_t);
let f = signal.get().or_else(|| bsd_signal.get());
let f = f.expect("neither `signal` nor `bsd_signal` symbols found");
f(signum, handler)
}
// The `ftruncate64` symbol apparently appeared in android-12, so we do some
// dynamic detection to see if we can figure out whether `ftruncate64` exists.
//
// If it doesn't we just fall back to `ftruncate`, generating an error for
// too-large values.
#[cfg(target_pointer_width = "32")]
/// Truncates `fd` to `size` bytes, preferring the 64-bit-offset `ftruncate64`.
///
/// The `ftruncate64` symbol is resolved dynamically through the `weak!` macro
/// because it only exists on newer Android releases (see the comment above).
/// When it is unavailable, this falls back to plain `ftruncate`, which takes a
/// 32-bit length here, so sizes above `i32::max_value()` are rejected with an
/// `InvalidInput` error instead of being silently wrapped.
pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> {
weak!(fn ftruncate64(c_int, i64) -> c_int);
unsafe {
match ftruncate64.get() {
// Symbol present at runtime: use the 64-bit-capable call directly.
Some(f) => cvt_r(|| f(fd, size as i64)).map(|_| ()),
None => {
if size > i32::max_value() as u64 {
// The fallback takes a 32-bit length; refuse rather than truncate the value.
Err(io::Error::new(io::ErrorKind::InvalidInput,
"cannot truncate >2GB"))
} else {
cvt_r(|| ftruncate(fd, size as i32)).map(|_| ())
}
}
}
}
}
#[cfg(target_pointer_width = "64")]
pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> {
unsafe {
cvt_r(|| ftruncate(fd, size as i64)).map(|_| ())
}
}
#[cfg(target_pointer_width = "32")]
pub unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: size_t, offset: i64)
-> io::Result<ssize_t>
{
use convert::TryInto;
weak!(fn pread64(c_int, *mut c_void, size_t, i64) -> ssize_t);
pread64.get().map(|f| cvt(f(fd, buf, count, offset))).unwrap_or_else(|| {
if let Ok(o) = offset.try_into() {
cvt(pread(fd, buf, count, o))
} else {
Err(io::Error::new(io::ErrorKind::InvalidInput,
"cannot pread >2GB"))
}
})
}
#[cfg(target_pointer_width = "32")]
pub unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: size_t, offset: i64)
-> io::Result<ssize_t>
{
use convert::TryInto;
weak!(fn pwrite64(c_int, *const c_void, size_t, i64) -> ssize_t);
pwrite64.get().map(|f| cvt(f(fd, buf, count, offset))).unwrap_or_else(|| {
if let Ok(o) = offset.try_into() {
cvt(pwrite(fd, buf, count, o))
} else {
Err(io::Error::new(io::ErrorKind::InvalidInput,
"cannot pwrite >2GB"))
}
})
}
#[cfg(target_pointer_width = "64")]
pub unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: size_t, offset: i64)
-> io::Result<ssize_t>
{
cvt(pread(fd, buf, count, offset))
}
#[cfg(target_pointer_width = "64")]
pub unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: size_t, offset: i64)
-> io::Result<ssize_t>
{
cvt(pwrite(fd, buf, count, offset))
}<|fim▁end|>
| |
<|file_name|>command-handlers.js<|end_file_name|><|fim▁begin|><|fim▁hole|>
function fixImports() {
  // Grab the focused editor; do nothing when no editor pane is active.
  const editor = atom.workspace.getActiveTextEditor();
  if (!editor) {
    return;
  }
  // fixImports(editor)
  // editor.selectLinesContainingCursors()
}
module.exports = {
fixImports,
};<|fim▁end|>
|
'use strict';
|
<|file_name|>describe.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'functions describe' command."""
from googlecloudsdk.api_lib.functions import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
class Describe(base.DescribeCommand):
"""Show description of a function."""
  @staticmethod
  def Args(parser):
    """Register flags for this command.

    Args:
      parser: an argparse parser on which this command's positional
        arguments and flags are declared.
    """
    parser.add_argument(
        'name', help='The name of the function to describe.',
        type=util.ValidateFunctionNameOrRaise)
@util.CatchHTTPErrorRaiseHTTPException
def Run(self, args):
"""This is what gets called when the user runs this command.<|fim▁hole|> args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
The specified function with its description and configured filter.
"""
client = self.context['functions_client']
messages = self.context['functions_messages']
project = properties.VALUES.core.project.Get(required=True)
registry = self.context['registry']
function_ref = registry.Parse(
args.name, params={'projectsId': project, 'locationsId': args.region},
collection='cloudfunctions.projects.locations.functions')
# TODO(user): Use resources.py here after b/21908671 is fixed.
return client.projects_locations_functions.Get(
messages.CloudfunctionsProjectsLocationsFunctionsGetRequest(
name=function_ref.RelativeName()))<|fim▁end|>
|
Args:
|
<|file_name|>pyunit_mnist_manyCols_gbm_large.py<|end_file_name|><|fim▁begin|>import sys
sys.path.insert(1,"../../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
def mnist_many_cols_gbm_large():
train = h2o.import_file(path=pyunit_utils.locate("bigdata/laptop/mnist/train.csv.gz"))
train.tail()
gbm_mnist = H2OGradientBoostingEstimator(ntrees=1,
max_depth=1,
min_rows=10,
learn_rate=0.01)
gbm_mnist.train(x=range(784), y=784, training_frame=train)
gbm_mnist.show()
<|fim▁hole|> pyunit_utils.standalone_test(mnist_many_cols_gbm_large)
else:
mnist_many_cols_gbm_large()<|fim▁end|>
|
if __name__ == "__main__":
|
<|file_name|>LCExtension.cc<|end_file_name|><|fim▁begin|>//# LCExtension.cc: Extend an LCRegion along straight lines to other dimensions
//# Copyright (C) 1998,2001
//# Associated Universities, Inc. Washington DC, USA.
//#
//# This library is free software; you can redistribute it and/or modify it
//# under the terms of the GNU Library General Public License as published by
//# the Free Software Foundation; either version 2 of the License, or (at your
//# option) any later version.
//#
//# This library is distributed in the hope that it will be useful, but WITHOUT
//# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
//# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Library General Public
//# License for more details.
//#
//# You should have received a copy of the GNU Library General Public License
//# along with this library; if not, write to the Free Software Foundation,
//# Inc., 675 Massachusetts Ave, Cambridge, MA 02139, USA.
//#
//# Correspondence concerning AIPS++ should be addressed as follows:
//# Internet email: [email protected].
//# Postal address: AIPS++ Project Office
//# National Radio Astronomy Observatory
//# 520 Edgemont Road
//# Charlottesville, VA 22903-2475 USA
//#
//# $Id$
#include <casacore/lattices/LRegions/LCExtension.h>
#include <casacore/lattices/LRegions/LCBox.h>
#include <casacore/casa/Arrays/Vector.h>
#include <casacore/tables/Tables/TableRecord.h>
#include <casacore/casa/Utilities/GenSort.h>
#include <casacore/casa/Exceptions/Error.h>
namespace casacore { //# NAMESPACE CASACORE - BEGIN
// Default constructor: creates an empty extension, only useful prior to
// assignment or reconstruction via fromRecord().
LCExtension::LCExtension()
{}

// Construct from a region, the axes to extend along, and the box that
// defines the extent along those new axes.  The region is cloned, so the
// caller keeps ownership of `region`.
LCExtension::LCExtension (const LCRegion& region,
                          const IPosition& extendAxes,
                          const LCBox& extendBox)
: LCRegionMulti (True, region.cloneRegion())
{
    // Fill the other members variables and determine the bounding box.
    fill (extendAxes, extendBox);
}
LCExtension::LCExtension (Bool takeOver,
const LCRegion* region,
const IPosition& extendAxes,
const LCBox& extendBox)
: LCRegionMulti (takeOver, region)
{<|fim▁hole|>}
// Copy constructor.
LCExtension::LCExtension (const LCExtension& other)
: LCRegionMulti (other),
  itsExtendAxes (other.itsExtendAxes),
  itsRegionAxes (other.itsRegionAxes),
  itsExtendBox (other.itsExtendBox)
{}

LCExtension::~LCExtension()
{}

// Assignment operator; safe under self-assignment.
LCExtension& LCExtension::operator= (const LCExtension& other)
{
    if (this != &other) {
        LCRegionMulti::operator= (other);
        // Resize before assigning; IPosition assignment presumably
        // requires matching lengths -- confirm against casacore docs.
        itsExtendAxes.resize (other.itsExtendAxes.nelements());
        itsRegionAxes.resize (other.itsRegionAxes.nelements());
        itsExtendAxes = other.itsExtendAxes;
        itsRegionAxes = other.itsRegionAxes;
        itsExtendBox = other.itsExtendBox;
    }
    return *this;
}
// Equality: parent-class state plus the extension's own axes and box
// must all match.
Bool LCExtension::operator== (const LCRegion& other) const
{
    // Check if parent class matches.
    // If so, we can safely cast.
    if (! LCRegionMulti::operator== (other)) {
        return False;
    }
    const LCExtension& that = (const LCExtension&)other;
    // Check the private data
    if (! itsExtendAxes.isEqual (that.itsExtendAxes)
    || ! itsRegionAxes.isEqual (that.itsRegionAxes)
    || !(itsExtendBox == that.itsExtendBox)) {
        return False;
    }
    return True;
}

// Make a polymorphic copy; the caller owns the returned pointer.
LCRegion* LCExtension::cloneRegion() const
{
    return new LCExtension (*this);
}
// Translate the extension by `translateVector` into a lattice of shape
// `newLatticeShape`.  The extend box and the wrapped region are moved
// independently, each using only the vector/shape elements belonging to
// its own axes.  The caller owns the returned pointer.
LCRegion* LCExtension::doTranslate (const Vector<Float>& translateVector,
                                    const IPosition& newLatticeShape) const
{
    uInt i;
    // First translate the extendBox.
    // Take appropriate elements from the vectors.
    uInt nre = itsExtendAxes.nelements();
    Vector<Float> boxTransVec (nre);
    IPosition boxLatShape (nre);
    for (i=0; i<nre; i++) {
        uInt axis = itsExtendAxes(i);
        boxTransVec(i) = translateVector(axis);
        boxLatShape(i) = newLatticeShape(axis);
    }
    LCBox* boxPtr = (LCBox*)(itsExtendBox.translate (boxTransVec, boxLatShape));
    // Now translate the region.
    uInt nrr = itsRegionAxes.nelements();
    Vector<Float> regTransVec (nrr);
    IPosition regLatShape (nrr);
    for (i=0; i<nrr; i++) {
        uInt axis = itsRegionAxes(i);
        regTransVec(i) = translateVector(axis);
        regLatShape(i) = newLatticeShape(axis);
    }
    LCRegion* regPtr = region().translate (regTransVec, regLatShape);
    // Create the new LCExtension object.
    // NOTE(review): boxPtr/regPtr would leak if this constructor throws;
    // presumably acceptable here, but smart pointers would be safer.
    LCExtension* extPtr = new LCExtension (*regPtr, itsExtendAxes, *boxPtr);
    delete boxPtr;
    delete regPtr;
    return extPtr;
}
{
return "LCExtension";
}
String LCExtension::type() const
{
return className();
}
TableRecord LCExtension::toRecord (const String& tableName) const
{
TableRecord rec;
defineRecordFields (rec, className());
rec.defineRecord ("region", region().toRecord (tableName));
rec.define ("axes", itsExtendAxes.asVector());
rec.defineRecord ("box", itsExtendBox.toRecord (tableName));
return rec;
}
LCExtension* LCExtension::fromRecord (const TableRecord& rec,
const String& tableName)
{
// Initialize pointers to 0 to get rid of gcc-2.95 warnings.
LCRegion* regPtr = 0;
regPtr = LCRegion::fromRecord (rec.asRecord("region"), tableName);
LCBox* boxPtr = 0;
boxPtr = (LCBox*)(LCRegion::fromRecord (rec.asRecord("box"), tableName));
LCExtension* extPtr = new LCExtension (True, regPtr,
Vector<Int>(rec.toArrayInt ("axes")),
*boxPtr);
delete boxPtr;
return extPtr;
}
void LCExtension::fillRegionAxes()
{
uInt nre = itsExtendAxes.nelements();
uInt nrr = region().ndim();
uInt nrdim = nre+nrr;
// allAxes will get the remaining (thus region) axes at the end.
IPosition allAxes = IPosition::makeAxisPath (nrdim, itsExtendAxes);
itsRegionAxes.resize (nrr);
for (uInt i=nre; i<nrdim; i++) {
uInt axis = allAxes(i);
itsRegionAxes(i-nre) = axis;
}
}
void LCExtension::fill (const IPosition& extendAxes, const LCBox& extendBox)
{
// Check if extend axes are specified correctly.
// They do not need to be in ascending order, but duplicates are
// not allowed.
IPosition regionShape = region().shape();
uInt nre = extendAxes.nelements();
if (nre == 0) {
throw (AipsError ("LCExtension::LCExtension - "
"no extend axes have been specified"));
}
if (nre != extendBox.blc().nelements()) {
throw (AipsError ("LCExtension::LCExtension - "
"number of axes in extend box mismatches "
"number of extend axes"));
}
// The axes can be specified in any order. We want them ordered.
// So sort them and fill itsExtendAxes and itsExtendBox.
itsExtendAxes.resize (nre);
IPosition boxLatShape(nre);
Vector<Float> boxLatBlc(nre);
Vector<Float> boxLatTrc(nre);
Vector<uInt> reginx(nre);
GenSortIndirect<ssize_t>::sort (reginx, extendAxes.storage(), nre);
Int first = -1;
for (uInt i=0; i<nre; i++) {
uInt axis = reginx(i);
itsExtendAxes(i) = extendAxes(axis);
boxLatShape(i) = extendBox.latticeShape()(axis);
boxLatBlc(i) = extendBox.blc()(axis);
boxLatTrc(i) = extendBox.trc()(axis);
if (itsExtendAxes(i) <= first) {
throw (AipsError ("LCExtension::LCExtension - "
"extend axes multiply specified"));
}
first = itsExtendAxes(i);
}
itsExtendBox = LCBox (boxLatBlc, boxLatTrc, boxLatShape);
// Fill itsRegionAxes, i.e. the mapping of the axis of the contributing
// region into the extended region.
fillRegionAxes();
// Make up the lattice shape from the region and box latticeshape.
// Fill the bounding box from blc/trc in region and box.
uInt nrr = itsRegionAxes.nelements();
uInt nrdim = nre+nrr;
IPosition latShape(nrdim);
IPosition blc (nrdim);
IPosition trc (nrdim);
const IPosition& regionShp = region().latticeShape();
const IPosition& regionBlc = region().boundingBox().start();
const IPosition& regionTrc = region().boundingBox().end();
for (uInt i=0; i<nrr; i++) {
uInt axis = itsRegionAxes(i);
latShape(axis) = regionShp(i);
blc(axis) = regionBlc(i);
trc(axis) = regionTrc(i);
}
const IPosition& boxShp = itsExtendBox.latticeShape();
const IPosition& boxBlc = itsExtendBox.boundingBox().start();
const IPosition& boxTrc = itsExtendBox.boundingBox().end();
for (uInt i=0; i<nre; i++) {
uInt axis = itsExtendAxes(i);
latShape(axis) = boxShp(i);
blc(axis) = boxBlc(i);
trc(axis) = boxTrc(i);
}
setShapeAndBoundingBox (latShape, Slicer(blc, trc, Slicer::endIsLast));
fillHasMask();
}
void LCExtension::multiGetSlice (Array<Bool>& buffer,
const Slicer& section)
{
buffer.resize (section.length());
uInt i;
uInt nre = itsExtendAxes.nelements();
uInt nrr = itsRegionAxes.nelements();
// Read the required region section.
// This means we have to create a Slicer for those axes only.
IPosition blc(nrr);
IPosition len(nrr);
IPosition inc(nrr);
IPosition shape(buffer.ndim(), 1);
for (i=0; i<nrr; i++) {
uInt axis = itsRegionAxes(i);
blc(i) = section.start()(axis);
len(i) = section.length()(axis);
inc(i) = section.stride()(axis);
shape(axis) = len(i);
}
Array<Bool> tmpbuf(len);
LCRegion* reg = (LCRegion*)(regions()[0]);
reg->doGetSlice (tmpbuf, Slicer(blc, len, inc));
// Reform tmpbuf, so it has the same dimensionality as buffer.
Array<Bool> mask = tmpbuf.reform (shape);
// Now we have to extend tmpbuf along all extend axes.
const IPosition& length = section.length();
IPosition pos (buffer.ndim(), 0);
IPosition end (buffer.shape() - 1);
//# Iterate along itsExtendAxes (the new axes) through the new mask.
for (;;) {
for (i=0; i<nre; i++) {
end(itsExtendAxes(i)) = pos(itsExtendAxes(i));
}
//# Set each section of the mask to the mask of the region.
buffer(pos,end) = mask;
//# Go to the next section.
for (i=0; i<nre; i++) {
if (++pos(itsExtendAxes(i)) < length(itsExtendAxes(i))) {
break;
}
// This dimension is done. Reset it and continue with the next.
pos(itsExtendAxes(i)) = 0;
}
//# End the iteration when all dimensions are done.
if (i == nre) {
break;
}
}
}
IPosition LCExtension::doNiceCursorShape (uInt maxPixels) const
{
return Lattice<Bool>::doNiceCursorShape (maxPixels);
}
} //# NAMESPACE CASACORE - END<|fim▁end|>
|
// Fill the other members variables and determine the bounding box.
fill (extendAxes, extendBox);
|
<|file_name|>WatchLeakTest.java<|end_file_name|><|fim▁begin|>/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.zookeeper.server.quorum;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import static org.junit.Assert.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
import java.nio.channels.Selector;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import org.apache.jute.InputArchive;
import org.apache.jute.OutputArchive;
import org.apache.zookeeper.MockPacket;
import org.apache.zookeeper.ZooDefs;
import org.apache.zookeeper.proto.ConnectRequest;
import org.apache.zookeeper.proto.ReplyHeader;
import org.apache.zookeeper.proto.RequestHeader;
import org.apache.zookeeper.proto.SetWatches;
import org.apache.zookeeper.server.MockNIOServerCnxn;
import org.apache.zookeeper.server.NIOServerCnxn;
import org.apache.zookeeper.server.NIOServerCnxnFactory;
import org.apache.zookeeper.server.MockSelectorThread;
import org.apache.zookeeper.server.ZKDatabase;
import org.apache.zookeeper.server.ZooTrace;
import org.apache.zookeeper.server.persistence.FileTxnSnapLog;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Demonstrate ZOOKEEPER-1382 : Watches leak on expired session
*/
@RunWith(Parameterized.class)
public class WatchLeakTest {
protected static final Logger LOG = LoggerFactory
.getLogger(WatchLeakTest.class);
final long SESSION_ID = 0xBABEL;
private final boolean sessionTimedout;
public WatchLeakTest(boolean sessionTimedout) {
this.sessionTimedout = sessionTimedout;
}
@Parameters
public static Collection<Object[]> configs() {
return Arrays.asList(new Object[][] {
{ false }, { true },
});
}
/**
* Check that if session has expired then no watch can be set
*/
@Test
public void testWatchesLeak() throws Exception {
NIOServerCnxnFactory serverCnxnFactory = mock(NIOServerCnxnFactory.class);
final SelectionKey sk = new FakeSK();
MockSelectorThread selectorThread = mock(MockSelectorThread.class);
when(selectorThread.addInterestOpsUpdateRequest(any(SelectionKey.class))).thenAnswer(new Answer<Boolean>() {
@Override
public Boolean answer(InvocationOnMock invocation) throws Throwable {
SelectionKey sk = (SelectionKey)invocation.getArguments()[0];
NIOServerCnxn nioSrvCnx = (NIOServerCnxn)sk.attachment();
sk.interestOps(nioSrvCnx.getInterestOps());
return true;
}
});
ZKDatabase database = new ZKDatabase(null);
database.setlastProcessedZxid(2L);
QuorumPeer quorumPeer = mock(QuorumPeer.class);
FileTxnSnapLog logfactory = mock(FileTxnSnapLog.class);
// Directories are not used but we need it to avoid NPE
when(logfactory.getDataDir()).thenReturn(new File(""));
when(logfactory.getSnapDir()).thenReturn(new File(""));
FollowerZooKeeperServer fzks = null;
try {
// Create a new follower
fzks = new FollowerZooKeeperServer(logfactory, quorumPeer, database);
fzks.startup();
fzks.setServerCnxnFactory(serverCnxnFactory);
quorumPeer.follower = new MyFollower(quorumPeer, fzks);
LOG.info("Follower created");
// Simulate a socket channel between a client and a follower
final SocketChannel socketChannel = createClientSocketChannel();
// Create the NIOServerCnxn that will handle the client requests
final MockNIOServerCnxn nioCnxn = new MockNIOServerCnxn(fzks,
socketChannel, sk, serverCnxnFactory, selectorThread);<|fim▁hole|> // Send the connection request as a client do
nioCnxn.doIO(sk);
LOG.info("Client connection sent");
// Send the valid or invalid session packet to the follower
QuorumPacket qp = createValidateSessionPacketResponse(!sessionTimedout);
quorumPeer.follower.processPacket(qp);
LOG.info("Session validation sent");
// OK, now the follower knows that the session is valid or invalid, let's try
// to send the watches
nioCnxn.doIO(sk);
// wait for the the request processor to do his job
Thread.sleep(1000L);
LOG.info("Watches processed");
// If session has not been validated, there must be NO watches
int watchCount = database.getDataTree().getWatchCount();
if (sessionTimedout) {
// Session has not been re-validated !
LOG.info("session is not valid, watches = {}", watchCount);
assertEquals("Session is not valid so there should be no watches", 0, watchCount);
} else {
// Session has been re-validated
LOG.info("session is valid, watches = {}", watchCount);
assertEquals("Session is valid so the watch should be there", 1, watchCount);
}
} finally {
if (fzks != null) {
fzks.shutdown();
}
}
}
/**
* A follower with no real leader connection
*/
public static class MyFollower extends Follower {
/**
* Create a follower with a mocked leader connection
*
* @param self
* @param zk
*/
MyFollower(QuorumPeer self, FollowerZooKeeperServer zk) {
super(self, zk);
leaderOs = mock(OutputArchive.class);
leaderIs = mock(InputArchive.class);
bufferedOutput = mock(BufferedOutputStream.class);
}
}
/**
* Simulate the behavior of a real selection key
*/
private static class FakeSK extends SelectionKey {
@Override
public SelectableChannel channel() {
return null;
}
@Override
public Selector selector() {
return mock(Selector.class);
}
@Override
public boolean isValid() {
return true;
}
@Override
public void cancel() {
}
@Override
public int interestOps() {
return ops;
}
private int ops = OP_WRITE + OP_READ;
@Override
public SelectionKey interestOps(int ops) {
this.ops = ops;
return this;
}
@Override
public int readyOps() {
boolean reading = (ops & OP_READ) != 0;
boolean writing = (ops & OP_WRITE) != 0;
if (reading && writing) {
LOG.info("Channel is ready for reading and writing");
} else if (reading) {
LOG.info("Channel is ready for reading only");
} else if (writing) {
LOG.info("Channel is ready for writing only");
}
return ops;
}
}
/**
* Create a watches message with a single watch on /
*
* @return a message that attempts to set 1 watch on /
*/
private ByteBuffer createWatchesMessage() {
List<String> dataWatches = new ArrayList<String>(1);
dataWatches.add("/");
List<String> existWatches = Collections.emptyList();
List<String> childWatches = Collections.emptyList();
SetWatches sw = new SetWatches(1L, dataWatches, existWatches,
childWatches);
RequestHeader h = new RequestHeader();
h.setType(ZooDefs.OpCode.setWatches);
h.setXid(-8);
MockPacket p = new MockPacket(h, new ReplyHeader(), sw, null, null);
return p.createAndReturnBB();
}
/**
* This is the secret that we use to generate passwords, for the moment it
* is more of a sanity check.
*/
static final private long superSecret = 0XB3415C00L;
/**
* Create a connection request
*
* @return a serialized connection request
*/
private ByteBuffer createConnRequest() {
Random r = new Random(SESSION_ID ^ superSecret);
byte p[] = new byte[16];
r.nextBytes(p);
ConnectRequest conReq = new ConnectRequest(0, 1L, 30000, SESSION_ID, p);
MockPacket packet = new MockPacket(null, null, conReq, null, null, false);
return packet.createAndReturnBB();
}
/**
* Mock a client channel with a connection request and a watches message
* inside.
*
* @return a socket channel
* @throws IOException
*/
private SocketChannel createClientSocketChannel() throws IOException {
SocketChannel socketChannel = mock(SocketChannel.class);
Socket socket = mock(Socket.class);
InetSocketAddress socketAddress = new InetSocketAddress(1234);
when(socket.getRemoteSocketAddress()).thenReturn(socketAddress);
when(socketChannel.socket()).thenReturn(socket);
// Send watches packet to server connection
final ByteBuffer connRequest = createConnRequest();
final ByteBuffer watchesMessage = createWatchesMessage();
final ByteBuffer request = ByteBuffer.allocate(connRequest.limit()
+ watchesMessage.limit());
request.put(connRequest);
request.put(watchesMessage);
Answer<Integer> answer = new Answer<Integer>() {
int i = 0;
@Override
public Integer answer(InvocationOnMock invocation) throws Throwable {
Object[] args = invocation.getArguments();
ByteBuffer bb = (ByteBuffer) args[0];
for (int k = 0; k < bb.limit(); k++) {
bb.put(request.get(i));
i = i + 1;
}
return bb.limit();
}
};
when(socketChannel.read(any(ByteBuffer.class))).thenAnswer(answer);
return socketChannel;
}
/**
* Forge an invalid session packet as a LEADER do
*
* @param valid <code>true</code> to create a valid session message
*
* @throws Exception
*/
private QuorumPacket createValidateSessionPacketResponse(boolean valid) throws Exception {
QuorumPacket qp = createValidateSessionPacket();
ByteArrayInputStream bis = new ByteArrayInputStream(qp.getData());
DataInputStream dis = new DataInputStream(bis);
long id = dis.readLong();
ByteArrayOutputStream bos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(bos);
dos.writeLong(id);
// false means that the session has expired
dos.writeBoolean(valid);
qp.setData(bos.toByteArray());
return qp;
}
/**
* Forge an validate session packet as a LEARNER do
*
* @return
* @throws Exception
*/
private QuorumPacket createValidateSessionPacket() throws Exception {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
dos.writeLong(SESSION_ID);
dos.writeInt(3000);
dos.close();
QuorumPacket qp = new QuorumPacket(Leader.REVALIDATE, -1,
baos.toByteArray(), null);
return qp;
}
}<|fim▁end|>
|
sk.attach(nioCnxn);
|
<|file_name|>configureStore.js<|end_file_name|><|fim▁begin|>import { createStore, compose, applyMiddleware } from 'redux';
import reduxImmutableStateInvariant from 'redux-immutable-state-invariant';
import thunk from 'redux-thunk';
import axios from 'axios';
import axiosMiddleware from 'redux-axios-middleware';
import rootReducer from '../reducers';
const client = axios.create({
//all axios can be used, shown in axios documentation
baseURL: '/api/',
responseType: 'json'
});
function configureStoreProd(initialState) {
const middlewares = [
// Add other middleware on this line...
// thunk middleware can also accept an extra argument to be passed to each thunk action
// https://github.com/gaearon/redux-thunk#injecting-a-custom-argument
thunk,
axiosMiddleware(client)
];
return createStore(
rootReducer,
initialState,
compose(applyMiddleware(...middlewares))
);
}
function configureStoreDev(initialState) {
const middlewares = [
// Add other middleware on this line...
// Redux middleware that spits an error on you when you try to mutate your state either inside a dispatch or between dispatches.
reduxImmutableStateInvariant(),
// thunk middleware can also accept an extra argument to be passed to each thunk action
// https://github.com/gaearon/redux-thunk#injecting-a-custom-argument
thunk,
axiosMiddleware(client)
];
const composeEnhancers =
window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; // add support for Redux dev tools
const store = createStore(
rootReducer,<|fim▁hole|>
if (module.hot) {
// Enable Webpack hot module replacement for reducers
module.hot.accept('../reducers', () => {
const nextReducer = require('../reducers').default; // eslint-disable-line global-require
store.replaceReducer(nextReducer);
});
}
return store;
}
const configureStore =
process.env.NODE_ENV === 'production'
? configureStoreProd
: configureStoreDev;
export default configureStore;<|fim▁end|>
|
initialState,
composeEnhancers(applyMiddleware(...middlewares))
);
|
<|file_name|>foursquares.py<|end_file_name|><|fim▁begin|>"""<|fim▁hole|>remaining two are in random colors.
"""
from turtlegraphics import Turtle
import random
def drawSquare(turtle, x, y, length):
turtle.up()
turtle.move(x, y)
turtle.setDirection(270)
turtle.down()
for count in xrange(4):
turtle.move(length)
turtle.turn(90)
def main():
turtle = Turtle()
#turtle.setWidth(1)
# Length of square
length = 40
# Relative distances to corners from origin
width = turtle.getWidth() / 2
height = turtle.getHeight() / 2
# Black
turtle.setColor(0, 0, 0)
# Upper left corner
drawSquare(turtle, -width, height, length)
# Gray
turtle.setColor(127, 127, 127)
# Lower left corner
drawSquare(turtle, -width, length - height, length)
# First random color
turtle.setColor(random.randint(0, 255),
random.randint(0, 255),
random.randint(0, 255))
# Upper right corner
drawSquare(turtle, width - length, height, length)
# Second random color
turtle.setColor(random.randint(0, 255),
random.randint(0, 255),
random.randint(0, 255))
# Lower right corner
drawSquare(turtle, width - length,
length - height, length)
main()<|fim▁end|>
|
File: foursquares.py
Draws squares in the corners of a turtle window.
One square is black, another is gray, and the
|
<|file_name|>method-ambig-one-trait-unknown-int-type.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that we invoking `foo()` successfully resolves to the trait `foo`<|fim▁hole|>trait foo {
fn foo(&self) -> isize;
}
// Two distinct `Vec` element types give the trait two applicable impls,
// so the element type cannot be inferred from a `foo()` call alone.
impl foo for Vec<usize> {
    fn foo(&self) -> isize {1}
}
impl foo for Vec<isize> {
    fn foo(&self) -> isize {2}
}
// This is very hokey: we have heuristics to suppress messages about
// type annotations required. But placing these two bits of code into
// distinct functions, in this order, causes us to print out both
// errors I'd like to see.
fn m1() {
    // we couldn't infer the type of the vector just based on calling foo()...
    let mut x = Vec::new(); //~ ERROR type annotations required
    x.foo();
}
fn m2() {
    let mut x = Vec::new();
    // ...but we still resolved `foo()` to the trait and hence know the return type.
    let y: usize = x.foo(); //~ ERROR mismatched types
}
fn main() { }<|fim▁end|>
|
// (prompting the mismatched types error) but does not influence the choice
// of what kind of `Vec` we have, eventually leading to a type error.
|
<|file_name|>TmgProtocol.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2017 - 2018 Anton Tananaev ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.traccar.protocol;
import io.netty.handler.codec.string.StringDecoder;
import io.netty.handler.codec.string.StringEncoder;
import org.traccar.BaseProtocol;
import org.traccar.PipelineBuilder;
import org.traccar.TrackerServer;
public class TmgProtocol extends BaseProtocol {<|fim▁hole|> public TmgProtocol() {
addServer(new TrackerServer(false, getName()) {
@Override
protected void addProtocolHandlers(PipelineBuilder pipeline) {
pipeline.addLast(new TmgFrameDecoder());
pipeline.addLast(new StringEncoder());
pipeline.addLast(new StringDecoder());
pipeline.addLast(new TmgProtocolDecoder(TmgProtocol.this));
}
});
}
}<|fim▁end|>
| |
<|file_name|>event.rs<|end_file_name|><|fim▁begin|>use std::io::Write;
use termion::event::Key;
use Editor;
pub type EventHandler<'a, W> = FnMut(Event<W>) + 'a;
pub struct Event<'a, 'out: 'a, W: Write + 'a> {
pub editor: &'a mut Editor<'out, W>,
pub kind: EventKind,
}<|fim▁hole|>
impl<'a, 'out: 'a, W: Write + 'a> Event<'a, 'out, W> {
pub fn new(editor: &'a mut Editor<'out, W>, kind: EventKind) -> Self {
Event {
editor: editor,
kind: kind,
}
}
}
/// The kinds of editor events delivered to an `EventHandler`.
#[derive(Debug)]
pub enum EventKind {
    /// Sent before handling a keypress.
    BeforeKey(Key),
    /// Sent after handling a keypress.
    AfterKey(Key),
    /// Sent in `Editor.complete()`, before processing the completion.
    BeforeComplete,
}
| |
<|file_name|>htmlmediaelement.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use audio_video_metadata;
use document_loader::LoadType;
use dom::attr::Attr;
use dom::bindings::cell::DOMRefCell;
use dom::bindings::codegen::Bindings::AttrBinding::AttrMethods;
use dom::bindings::codegen::Bindings::HTMLMediaElementBinding::CanPlayTypeResult;
use dom::bindings::codegen::Bindings::HTMLMediaElementBinding::HTMLMediaElementConstants::*;
use dom::bindings::codegen::Bindings::HTMLMediaElementBinding::HTMLMediaElementMethods;
use dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorConstants::*;
use dom::bindings::codegen::Bindings::MediaErrorBinding::MediaErrorMethods;
use dom::bindings::inheritance::Castable;
use dom::bindings::js::{MutNullableJS, Root};
use dom::bindings::refcounted::Trusted;
use dom::bindings::reflector::DomObject;
use dom::bindings::str::DOMString;
use dom::document::Document;
use dom::element::{Element, AttributeMutation};
use dom::event::{Event, EventBubbles, EventCancelable};
use dom::htmlaudioelement::HTMLAudioElement;
use dom::htmlelement::HTMLElement;
use dom::htmlsourceelement::HTMLSourceElement;
use dom::htmlvideoelement::HTMLVideoElement;
use dom::mediaerror::MediaError;
use dom::node::{window_from_node, document_from_node, Node, UnbindContext};
use dom::virtualmethods::VirtualMethods;
use dom_struct::dom_struct;
use html5ever::{LocalName, Prefix};
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use net_traits::{FetchResponseListener, FetchMetadata, Metadata, NetworkError};
use net_traits::request::{CredentialsMode, Destination, RequestInit, Type as RequestType};
use network_listener::{NetworkListener, PreInvoke};
use script_thread::{Runnable, ScriptThread};
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::cell::Cell;
use std::sync::{Arc, Mutex};
use task_source::TaskSource;
use time::{self, Timespec, Duration};
/// Fetch listener state for one in-flight media resource request.
/// A context is created per fetch started by the resource fetch algorithm;
/// network responses are routed to it via a `NetworkListener`.
struct HTMLMediaElementContext {
    /// The element that initiated the request.
    elem: Trusted<HTMLMediaElement>,
    /// The response body received to date.
    data: Vec<u8>,
    /// The response metadata received to date.
    metadata: Option<Metadata>,
    /// The generation of the media element when this fetch started.
    /// Compared in `should_invoke` so responses for superseded loads
    /// are dropped.
    generation_id: u32,
    /// Earliest time at which the next `progress` event may be fired.
    next_progress_event: Timespec,
    /// Url of resource requested.
    url: ServoUrl,
    /// Whether the media metadata has been completely received.
    have_metadata: bool,
    /// True if this response is invalid and should be ignored.
    ignore_response: bool,
}
impl FetchResponseListener for HTMLMediaElementContext {
    // Request-body callbacks carry no information for a media fetch.
    fn process_request_body(&mut self) {}
    fn process_request_eof(&mut self) {}
    // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
    fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
        // Unwrap filtered responses down to their underlying metadata.
        self.metadata = metadata.ok().map(|m| {
            match m {
                FetchMetadata::Unfiltered(m) => m,
                FetchMetadata::Filtered { unsafe_, .. } => unsafe_
            }
        });
        // => "If the media data cannot be fetched at all..."
        // Any non-2xx HTTP status counts as a failed fetch.
        let is_failure = self.metadata
            .as_ref()
            .and_then(|m| m.status
                .as_ref()
                .map(|&(s, _)| s < 200 || s >= 300))
            .unwrap_or(false);
        if is_failure {
            // Ensure that the element doesn't receive any further notifications
            // of the aborted fetch. The dedicated failure steps will be executed
            // when response_complete runs.
            self.ignore_response = true;
        }
    }
    fn process_response_chunk(&mut self, mut payload: Vec<u8>) {
        if self.ignore_response {
            return;
        }
        // Accumulate the body; `check_metadata` sniffs the whole buffer.
        self.data.append(&mut payload);
        let elem = self.elem.root();
        // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
        // => "Once enough of the media data has been fetched to determine the duration..."
        if !self.have_metadata {
            self.check_metadata(&elem);
        } else {
            elem.change_ready_state(HAVE_CURRENT_DATA);
        }
        // https://html.spec.whatwg.org/multipage/#concept-media-load-resource step 4,
        // => "If mode is remote" step 2
        // Throttle `progress` events to roughly one per 350ms.
        if time::get_time() > self.next_progress_event {
            elem.queue_fire_simple_event("progress");
            self.next_progress_event = time::get_time() + Duration::milliseconds(350);
        }
    }
    // https://html.spec.whatwg.org/multipage/#media-data-processing-steps-list
    fn process_response_eof(&mut self, status: Result<(), NetworkError>) {
        let elem = self.elem.root();
        // => "If the media data can be fetched but is found by inspection to be in an unsupported
        // format, or can otherwise not be rendered at all"
        if !self.have_metadata {
            elem.queue_dedicated_media_source_failure_steps();
        }
        // => "Once the entire media resource has been fetched..."
        else if status.is_ok() {
            elem.change_ready_state(HAVE_ENOUGH_DATA);
            elem.fire_simple_event("progress");
            elem.network_state.set(NETWORK_IDLE);
            elem.fire_simple_event("suspend");
        }
        // => "If the connection is interrupted after some media data has been received..."
        else if elem.ready_state.get() != HAVE_NOTHING {
            // Step 2
            elem.error.set(Some(&*MediaError::new(&*window_from_node(&*elem),
                                                  MEDIA_ERR_NETWORK)));
            // Step 3
            elem.network_state.set(NETWORK_IDLE);
            // TODO: Step 4 - update delay load flag
            // Step 5
            elem.fire_simple_event("error");
        } else {
            // => "If the media data cannot be fetched at all..."
            elem.queue_dedicated_media_source_failure_steps();
        }
        // Release the document load blocked when the fetch was started.
        let document = document_from_node(&*elem);
        document.finish_load(LoadType::Media(self.url.clone()));
    }
}
impl PreInvoke for HTMLMediaElementContext {
    /// Only deliver fetch notifications while the element has not started a
    /// newer load (the load algorithm bumps `generation_id`).
    fn should_invoke(&self) -> bool {
        //TODO: finish_load needs to run at some point if the generation changes.
        self.elem.root().generation_id.get() == self.generation_id
    }
}
impl HTMLMediaElementContext {
    /// Creates a fetch context for `elem` requesting `url`, snapshotting the
    /// element's current generation so stale responses can later be ignored.
    fn new(elem: &HTMLMediaElement, url: ServoUrl) -> HTMLMediaElementContext {
        HTMLMediaElementContext {
            elem: Trusted::new(elem),
            data: vec![],
            metadata: None,
            generation_id: elem.generation_id.get(),
            next_progress_event: time::get_time() + Duration::milliseconds(350),
            url: url,
            have_metadata: false,
            ignore_response: false,
        }
    }
    /// Tries to sniff media metadata out of the bytes buffered so far.
    /// On success, records the video description on the element, advances the
    /// ready state to HAVE_METADATA (spec step 6) and marks metadata as
    /// received; otherwise it is a no-op and is retried on the next chunk.
    fn check_metadata(&mut self, elem: &HTMLMediaElement) {
        // `if let` instead of a `match` with an empty `_` arm: only a
        // successful video parse carries any behavior.
        if let Ok(audio_video_metadata::Metadata::Video(meta)) =
                audio_video_metadata::get_format_from_slice(&self.data) {
            // A resource that reports no duration is treated as zero-length.
            let dur = meta.audio.duration.unwrap_or(::std::time::Duration::new(0, 0));
            *elem.video.borrow_mut() = Some(VideoMedia {
                format: format!("{:?}", meta.format),
                duration: Duration::seconds(dur.as_secs() as i64) +
                          Duration::nanoseconds(dur.subsec_nanos() as i64),
                width: meta.dimensions.width,
                height: meta.dimensions.height,
                video: meta.video.unwrap_or_default(),
                audio: meta.audio.audio,
            });
            // Step 6
            elem.change_ready_state(HAVE_METADATA);
            self.have_metadata = true;
        }
    }
}
/// Metadata sniffed from a video resource, stored on the owning
/// `HTMLMediaElement` once enough data has arrived (see `check_metadata`).
#[derive(JSTraceable, HeapSizeOf)]
pub struct VideoMedia {
    /// Debug rendering of the container format.
    format: String,
    /// Total duration; zero if the resource did not report one.
    #[ignore_heap_size_of = "defined in time"]
    duration: Duration,
    /// Frame width.
    width: u32,
    /// Frame height.
    height: u32,
    /// Video codec description; empty string when unknown.
    video: String,
    /// Audio codec description, if the resource has an audio track.
    audio: Option<String>,
}
/// https://html.spec.whatwg.org/multipage/#htmlmediaelement
/// Shared state for `<audio>` and `<video>` elements.
#[dom_struct]
pub struct HTMLMediaElement {
    htmlelement: HTMLElement,
    /// https://html.spec.whatwg.org/multipage/#dom-media-networkstate
    network_state: Cell<u16>,
    /// https://html.spec.whatwg.org/multipage/#dom-media-readystate
    ready_state: Cell<u16>,
    /// https://html.spec.whatwg.org/multipage/#dom-media-currentsrc
    current_src: DOMRefCell<String>,
    /// Incremented by the load algorithm; in-flight fetches compare it to
    /// detect that they have been superseded.
    generation_id: Cell<u32>,
    /// Whether `loadeddata` is still to be fired for the current resource.
    first_data_load: Cell<bool>,
    /// https://html.spec.whatwg.org/multipage/#dom-media-error
    error: MutNullableJS<MediaError>,
    /// https://html.spec.whatwg.org/multipage/#dom-media-paused
    paused: Cell<bool>,
    /// Internal autoplay bookkeeping flag: set by the load algorithm,
    /// cleared once playback starts or the element is paused.
    autoplaying: Cell<bool>,
    /// Metadata of the current video resource, once sniffed.
    video: DOMRefCell<Option<VideoMedia>>,
}
impl HTMLMediaElement {
    /// Shared constructor for media element subclasses. Starts in the
    /// spec's initial state: NETWORK_EMPTY / HAVE_NOTHING, no source,
    /// no error, paused, and eligible for autoplay.
    pub fn new_inherited(tag_name: LocalName,
                         prefix: Option<Prefix>, document: &Document)
                         -> HTMLMediaElement {
        HTMLMediaElement {
            htmlelement:
                HTMLElement::new_inherited(tag_name, prefix, document),
            network_state: Cell::new(NETWORK_EMPTY),
            ready_state: Cell::new(HAVE_NOTHING),
            current_src: DOMRefCell::new("".to_owned()),
            generation_id: Cell::new(0),
            first_data_load: Cell::new(true),
            error: Default::default(),
            paused: Cell::new(true),
            autoplaying: Cell::new(true),
            video: DOMRefCell::new(None),
        }
    }
    // https://html.spec.whatwg.org/multipage/#internal-pause-steps
    /// Pauses playback, queuing the `timeupdate`/`pause` events if the
    /// element was not already paused.
    fn internal_pause_steps(&self) {
        // Step 1
        self.autoplaying.set(false);
        // Step 2
        if !self.Paused() {
            // 2.1
            self.paused.set(true);
            // 2.2
            self.queue_internal_pause_steps_task();
            // TODO 2.3 (official playback position)
        }
        // TODO step 3 (media controller)
    }
    // https://html.spec.whatwg.org/multipage/#notify-about-playing
    /// Synchronous part of the "notify about playing" steps: fires `playing`.
    fn notify_about_playing(&self) {
        // Step 1
        self.fire_simple_event("playing");
        // TODO Step 2
    }
    /// Queues `notify_about_playing` on the DOM manipulation task source,
    /// so the `playing` event fires asynchronously.
    fn queue_notify_about_playing(&self) {
        struct Task {
            elem: Trusted<HTMLMediaElement>,
        }
        impl Runnable for Task {
            fn handler(self: Box<Task>) {
                self.elem.root().notify_about_playing();
            }
        }
        let task = box Task {
            elem: Trusted::new(self),
        };
        let win = window_from_node(self);
        let _ = win.dom_manipulation_task_source().queue(task, win.upcast());
    }
    // https://html.spec.whatwg.org/multipage/#internal-pause-steps step 2.2
    /// Queues the task that fires `timeupdate` followed by `pause`,
    /// as required by step 2.2 of the internal pause steps.
    fn queue_internal_pause_steps_task(&self) {
        struct Task {
            elem: Trusted<HTMLMediaElement>,
        }
        impl Runnable for Task {
            fn handler(self: Box<Task>) {
                let elem = self.elem.root();
                // 2.2.1
                elem.fire_simple_event("timeupdate");
                // 2.2.2
                elem.fire_simple_event("pause");
                // TODO 2.2.3
            }
        }
        let task = box Task {
            elem: Trusted::new(self),
        };
        let win = window_from_node(self);
        let _ = win.dom_manipulation_task_source().queue(task, win.upcast());
    }
    /// Queues a task on the DOM manipulation task source that fires a
    /// simple event of the given type at this element.
    fn queue_fire_simple_event(&self, type_: &'static str) {
        let win = window_from_node(self);
        let task = box FireSimpleEventTask::new(self, type_);
        let _ = win.dom_manipulation_task_source().queue(task, win.upcast());
    }
    /// Synchronously fires a non-bubbling, non-cancelable event of the
    /// given type at this element.
    fn fire_simple_event(&self, type_: &str) {
        let window = window_from_node(self);
        let event = Event::new(window.upcast(),
                               Atom::from(type_),
                               EventBubbles::DoesNotBubble,
                               EventCancelable::NotCancelable);
        event.fire(self.upcast());
    }
// https://html.spec.whatwg.org/multipage/#ready-states
fn change_ready_state(&self, ready_state: u16) {
let old_ready_state = self.ready_state.get();
self.ready_state.set(ready_state);
if self.network_state.get() == NETWORK_EMPTY {
return;
}
// Step 1
match (old_ready_state, ready_state) {
// previous ready state was HAVE_NOTHING, and the new ready state is
// HAVE_METADATA
(HAVE_NOTHING, HAVE_METADATA) => {
self.queue_fire_simple_event("loadedmetadata");
}
// previous ready state was HAVE_METADATA and the new ready state is
// HAVE_CURRENT_DATA or greater
(HAVE_METADATA, HAVE_CURRENT_DATA) |
(HAVE_METADATA, HAVE_FUTURE_DATA) |
(HAVE_METADATA, HAVE_ENOUGH_DATA) => {
if self.first_data_load.get() {
self.first_data_load.set(false);
self.queue_fire_simple_event("loadeddata");
}
}
// previous ready state was HAVE_FUTURE_DATA or more, and the new ready
// state is HAVE_CURRENT_DATA or less
(HAVE_FUTURE_DATA, HAVE_CURRENT_DATA) |
(HAVE_ENOUGH_DATA, HAVE_CURRENT_DATA) |
(HAVE_FUTURE_DATA, HAVE_METADATA) |
(HAVE_ENOUGH_DATA, HAVE_METADATA) |
(HAVE_FUTURE_DATA, HAVE_NOTHING) |
(HAVE_ENOUGH_DATA, HAVE_NOTHING) => {
// TODO: timeupdate event logic + waiting
}
_ => (),
}
// Step 1
// If the new ready state is HAVE_FUTURE_DATA or HAVE_ENOUGH_DATA,
// then the relevant steps below must then be run also.
match (old_ready_state, ready_state) {
// previous ready state was HAVE_CURRENT_DATA or less, and the new ready
// state is HAVE_FUTURE_DATA
(HAVE_CURRENT_DATA, HAVE_FUTURE_DATA) |
(HAVE_METADATA, HAVE_FUTURE_DATA) |
(HAVE_NOTHING, HAVE_FUTURE_DATA) => {
self.queue_fire_simple_event("canplay");
if !self.Paused() {
self.queue_notify_about_playing();
}
}
// new ready state is HAVE_ENOUGH_DATA
(_, HAVE_ENOUGH_DATA) => {
if old_ready_state <= HAVE_CURRENT_DATA {
self.queue_fire_simple_event("canplay");
if !self.Paused() {
self.queue_notify_about_playing();
}<|fim▁hole|> }
//TODO: check sandboxed automatic features browsing context flag
if self.autoplaying.get() &&
self.Paused() &&
self.Autoplay() {
// Step 1
self.paused.set(false);
// TODO step 2: show poster
// Step 3
self.queue_fire_simple_event("play");
// Step 4
self.queue_notify_about_playing();
// Step 5
self.autoplaying.set(false);
}
self.queue_fire_simple_event("canplaythrough");
}
_ => (),
}
// TODO Step 2: media controller
}
    // https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm
    /// Starts the resource selection algorithm: sets NETWORK_NO_SOURCE and
    /// defers the remainder (`resource_selection_algorithm_sync`) until the
    /// script thread reaches a stable state.
    fn invoke_resource_selection_algorithm(&self) {
        // Step 1
        self.network_state.set(NETWORK_NO_SOURCE);
        // TODO step 2 (show poster)
        // TODO step 3 (delay load event)
        // Step 4
        let doc = document_from_node(self);
        ScriptThread::await_stable_state(ResourceSelectionTask::new(self, doc.base_url()))
    }
    // https://html.spec.whatwg.org/multipage/#concept-media-load-algorithm
    /// Continuation of the resource selection algorithm (steps 5 onward),
    /// run at a stable state. Picks the source mode (object / `src`
    /// attribute / `<source>` children) and hands off to the resource
    /// fetch algorithm, or fails via the dedicated failure steps.
    #[allow(unreachable_code)]
    fn resource_selection_algorithm_sync(&self, base_url: ServoUrl) {
        // TODO step 5 (populate pending text tracks)
        // Step 6
        let mode = if false {
            // TODO media provider object
            ResourceSelectionMode::Object
        } else if let Some(attr) = self.upcast::<Element>().get_attribute(&ns!(), &local_name!("src")) {
            ResourceSelectionMode::Attribute(attr.Value().to_string())
        } else if false { // TODO: when implementing this remove #[allow(unreachable_code)] above.
            // TODO <source> child
            ResourceSelectionMode::Children(panic!())
        } else {
            // No source at all: revert to NETWORK_EMPTY and stop.
            self.network_state.set(NETWORK_EMPTY);
            return;
        };
        // Step 7
        self.network_state.set(NETWORK_LOADING);
        // Step 8
        self.queue_fire_simple_event("loadstart");
        // Step 9
        match mode {
            ResourceSelectionMode::Object => {
                // Step 1
                *self.current_src.borrow_mut() = "".to_owned();
                // Step 4
                self.resource_fetch_algorithm(Resource::Object);
            }
            ResourceSelectionMode::Attribute(src) => {
                // Step 1
                if src.is_empty() {
                    self.queue_dedicated_media_source_failure_steps();
                    return;
                }
                // Step 2
                let absolute_url = base_url.join(&src).map_err(|_| ());
                // Step 3
                if let Ok(url) = absolute_url {
                    *self.current_src.borrow_mut() = url.as_str().into();
                    // Step 4
                    self.resource_fetch_algorithm(Resource::Url(url));
                } else {
                    // Unparseable URL counts as a failed source.
                    self.queue_dedicated_media_source_failure_steps();
                }
            }
            ResourceSelectionMode::Children(_child) => {
                // TODO
                self.queue_dedicated_media_source_failure_steps()
            }
        }
    }
    // https://html.spec.whatwg.org/multipage/#concept-media-load-resource
    /// Fetches the selected resource. For URLs this wires an
    /// `HTMLMediaElementContext` into the fetch machinery via the router;
    /// non-URL resources currently fall through to the failure steps.
    fn resource_fetch_algorithm(&self, resource: Resource) {
        // TODO step 3 (remove text tracks)
        // Step 4
        if let Resource::Url(url) = resource {
            // 4.1
            // With preload="none" and no pending autoplay, suspend instead
            // of fetching now.
            if self.Preload() == "none" && !self.autoplaying.get() {
                // 4.1.1
                self.network_state.set(NETWORK_IDLE);
                // 4.1.2
                self.queue_fire_simple_event("suspend");
                // TODO 4.1.3 (delay load flag)
                // TODO 4.1.5-7 (state for load that initiates later)
                return;
            }
            // 4.2
            let context = Arc::new(Mutex::new(HTMLMediaElementContext::new(self, url.clone())));
            let (action_sender, action_receiver) = ipc::channel().unwrap();
            let window = window_from_node(self);
            let listener = NetworkListener {
                context: context,
                task_source: window.networking_task_source(),
                wrapper: Some(window.get_runnable_wrapper())
            };
            // Forward fetch messages from the IPC router to the listener.
            ROUTER.add_route(action_receiver.to_opaque(), box move |message| {
                listener.notify_fetch(message.to().unwrap());
            });
            // FIXME: we're supposed to block the load event much earlier than now
            let document = document_from_node(self);
            let ty = if self.is::<HTMLAudioElement>() {
                RequestType::Audio
            } else if self.is::<HTMLVideoElement>() {
                RequestType::Video
            } else {
                unreachable!("Unexpected HTMLMediaElement")
            };
            let request = RequestInit {
                url: url.clone(),
                type_: ty,
                destination: Destination::Media,
                credentials_mode: CredentialsMode::Include,
                use_url_credentials: true,
                origin: document.url(),
                pipeline_id: Some(self.global().pipeline_id()),
                referrer_url: Some(document.url()),
                referrer_policy: document.get_referrer_policy(),
                .. RequestInit::default()
            };
            document.fetch_async(LoadType::Media(url), request, action_sender);
        } else {
            // TODO local resource fetch
            self.queue_dedicated_media_source_failure_steps();
        }
    }
    /// Queues the dedicated media source failure steps on the DOM
    /// manipulation task source.
    fn queue_dedicated_media_source_failure_steps(&self) {
        let window = window_from_node(self);
        let _ = window.dom_manipulation_task_source().queue(
            box DedicatedMediaSourceFailureTask::new(self), window.upcast());
    }
    // https://html.spec.whatwg.org/multipage/#dedicated-media-source-failure-steps
    /// Marks the element as failed: records MEDIA_ERR_SRC_NOT_SUPPORTED,
    /// moves to NETWORK_NO_SOURCE and fires `error`.
    fn dedicated_media_source_failure(&self) {
        // Step 1
        self.error.set(Some(&*MediaError::new(&*window_from_node(self),
                                              MEDIA_ERR_SRC_NOT_SUPPORTED)));
        // TODO step 2 (forget resource tracks)
        // Step 3
        self.network_state.set(NETWORK_NO_SOURCE);
        // TODO step 4 (show poster)
        // Step 5
        self.fire_simple_event("error");
        // TODO step 6 (resolve pending play promises)
        // TODO step 7 (delay load event)
    }
    // https://html.spec.whatwg.org/multipage/#media-element-load-algorithm
    /// Resets the element and (re)starts loading. Bumping `generation_id`
    /// makes any in-flight fetch stale (see `PreInvoke::should_invoke`).
    fn media_element_load_algorithm(&self) {
        // A fresh load may fire `loadeddata` again.
        self.first_data_load.set(true)
        // TODO Step 1 (abort resource selection algorithm instances)
        // Step 2
        self.generation_id.set(self.generation_id.get() + 1);
        // TODO reject pending play promises
        // Step 3
        let network_state = self.NetworkState();
        if network_state == NETWORK_LOADING || network_state == NETWORK_IDLE {
            self.queue_fire_simple_event("abort");
        }
        // Step 4
        if network_state != NETWORK_EMPTY {
            // 4.1
            self.queue_fire_simple_event("emptied");
            // TODO 4.2 (abort in-progress fetch)
            // TODO 4.3 (detach media provider object)
            // TODO 4.4 (forget resource tracks)
            // 4.5
            if self.ready_state.get() != HAVE_NOTHING {
                self.change_ready_state(HAVE_NOTHING);
            }
            // 4.6
            if !self.Paused() {
                self.paused.set(true);
            }
            // TODO 4.7 (seeking)
            // TODO 4.8 (playback position)
            // TODO 4.9 (timeline offset)
            // TODO 4.10 (duration)
        }
        // TODO step 5 (playback rate)
        // Step 6
        self.error.set(None);
        self.autoplaying.set(true);
        // Step 7
        self.invoke_resource_selection_algorithm();
        // TODO step 8 (stop previously playing resource)
    }
}
impl HTMLMediaElementMethods for HTMLMediaElement {
    // https://html.spec.whatwg.org/multipage/#dom-media-networkstate
    fn NetworkState(&self) -> u16 {
        self.network_state.get()
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-readystate
    fn ReadyState(&self) -> u16 {
        self.ready_state.get()
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_getter!(Autoplay, "autoplay");
    // https://html.spec.whatwg.org/multipage/#dom-media-autoplay
    make_bool_setter!(SetAutoplay, "autoplay");
    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_url_getter!(Src, "src");
    // https://html.spec.whatwg.org/multipage/#dom-media-src
    make_setter!(SetSrc, "src");
    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    // Missing value default is user-agent defined.
    make_enumerated_getter!(Preload, "preload", "", "none" | "metadata" | "auto");
    // https://html.spec.whatwg.org/multipage/#attr-media-preload
    make_setter!(SetPreload, "preload");
    // https://html.spec.whatwg.org/multipage/#dom-media-currentsrc
    fn CurrentSrc(&self) -> DOMString {
        DOMString::from(self.current_src.borrow().clone())
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-load
    fn Load(&self) {
        self.media_element_load_algorithm();
    }
    // https://html.spec.whatwg.org/multipage/#dom-navigator-canplaytype
    // Conservatively answers "maybe" for every type.
    fn CanPlayType(&self, _type_: DOMString) -> CanPlayTypeResult {
        // TODO: application/octet-stream
        CanPlayTypeResult::Maybe
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-error
    fn GetError(&self) -> Option<Root<MediaError>> {
        self.error.get()
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-play
    // NOTE: per spec this should return a promise; promise plumbing is TODO.
    fn Play(&self) {
        // TODO step 1
        // Step 2
        if self.error.get().map_or(false, |e| e.Code() == MEDIA_ERR_SRC_NOT_SUPPORTED) {
            // TODO return rejected promise
            return;
        }
        // TODO step 3
        // Step 4
        if self.network_state.get() == NETWORK_EMPTY {
            self.invoke_resource_selection_algorithm();
        }
        // TODO step 5 (seek backwards)
        // TODO step 6 (media controller)
        let state = self.ready_state.get();
        // Step 7
        if self.Paused() {
            // 7.1
            self.paused.set(false);
            // TODO 7.2 (show poster)
            // 7.3
            self.queue_fire_simple_event("play");
            // 7.4
            // Not enough data yet: announce `waiting`; otherwise `playing`.
            if state == HAVE_NOTHING ||
               state == HAVE_METADATA ||
               state == HAVE_CURRENT_DATA {
                self.queue_fire_simple_event("waiting");
            } else {
                self.queue_notify_about_playing();
            }
        }
        // Step 8
        else if state == HAVE_FUTURE_DATA || state == HAVE_ENOUGH_DATA {
            // TODO resolve pending play promises
        }
        // Step 9
        self.autoplaying.set(false);
        // TODO step 10 (media controller)
        // TODO return promise
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-pause
    fn Pause(&self) {
        // Step 1
        if self.network_state.get() == NETWORK_EMPTY {
            self.invoke_resource_selection_algorithm();
        }
        // Step 2
        self.internal_pause_steps();
    }
    // https://html.spec.whatwg.org/multipage/#dom-media-paused
    fn Paused(&self) -> bool {
        self.paused.get()
    }
}
impl VirtualMethods for HTMLMediaElement {
    fn super_type(&self) -> Option<&VirtualMethods> {
        Some(self.upcast::<HTMLElement>() as &VirtualMethods)
    }
    /// Setting (or changing) the `src` attribute restarts the load
    /// algorithm; removing it does not.
    fn attribute_mutated(&self, attr: &Attr, mutation: AttributeMutation) {
        self.super_type().unwrap().attribute_mutated(attr, mutation);
        match attr.local_name() {
            &local_name!("src") => {
                if mutation.new_value(attr).is_some() {
                    self.media_element_load_algorithm();
                }
            }
            _ => (),
        };
    }
    // https://html.spec.whatwg.org/multipage/#playing-the-media-resource:remove-an-element-from-a-document
    /// On removal from a document, pause playback once the script thread
    /// reaches a stable state (unless the element has been re-inserted).
    fn unbind_from_tree(&self, context: &UnbindContext) {
        self.super_type().unwrap().unbind_from_tree(context);
        if context.tree_in_doc {
            ScriptThread::await_stable_state(PauseIfNotInDocumentTask::new(self));
        }
    }
}
/// Queued task that fires a simple event of a fixed type at a media element.
struct FireSimpleEventTask {
    elem: Trusted<HTMLMediaElement>,
    type_: &'static str,
}
impl FireSimpleEventTask {
    fn new(target: &HTMLMediaElement, type_: &'static str) -> FireSimpleEventTask {
        FireSimpleEventTask {
            elem: Trusted::new(target),
            type_: type_,
        }
    }
}
impl Runnable for FireSimpleEventTask {
    fn name(&self) -> &'static str { "FireSimpleEventTask" }
    fn handler(self: Box<FireSimpleEventTask>) {
        let elem = self.elem.root();
        elem.fire_simple_event(self.type_);
    }
}
/// Stable-state task that runs the synchronous part of the resource
/// selection algorithm with the base URL captured at queue time.
struct ResourceSelectionTask {
    elem: Trusted<HTMLMediaElement>,
    base_url: ServoUrl,
}
impl ResourceSelectionTask {
    fn new(elem: &HTMLMediaElement, url: ServoUrl) -> ResourceSelectionTask {
        ResourceSelectionTask {
            elem: Trusted::new(elem),
            base_url: url,
        }
    }
}
impl Runnable for ResourceSelectionTask {
    fn name(&self) -> &'static str { "ResourceSelectionTask" }
    fn handler(self: Box<ResourceSelectionTask>) {
        self.elem.root().resource_selection_algorithm_sync(self.base_url);
    }
}
/// Queued task that runs the dedicated media source failure steps.
struct DedicatedMediaSourceFailureTask {
    elem: Trusted<HTMLMediaElement>,
}
impl DedicatedMediaSourceFailureTask {
    fn new(elem: &HTMLMediaElement) -> DedicatedMediaSourceFailureTask {
        DedicatedMediaSourceFailureTask {
            elem: Trusted::new(elem),
        }
    }
}
impl Runnable for DedicatedMediaSourceFailureTask {
    fn name(&self) -> &'static str { "DedicatedMediaSourceFailureTask" }
    fn handler(self: Box<DedicatedMediaSourceFailureTask>) {
        self.elem.root().dedicated_media_source_failure();
    }
}
/// Stable-state task that pauses the element if it is no longer in a
/// document (i.e. it was removed and not re-inserted in the meantime).
struct PauseIfNotInDocumentTask {
    elem: Trusted<HTMLMediaElement>,
}
impl PauseIfNotInDocumentTask {
    fn new(elem: &HTMLMediaElement) -> PauseIfNotInDocumentTask {
        PauseIfNotInDocumentTask {
            elem: Trusted::new(elem),
        }
    }
}
impl Runnable for PauseIfNotInDocumentTask {
    fn name(&self) -> &'static str { "PauseIfNotInDocumentTask" }
    fn handler(self: Box<PauseIfNotInDocumentTask>) {
        let elem = self.elem.root();
        if !elem.upcast::<Node>().is_in_doc() {
            elem.internal_pause_steps();
        }
    }
}
/// How the resource selection algorithm located a candidate resource:
/// a media provider object, the `src` attribute, or a `<source>` child.
enum ResourceSelectionMode {
    Object,
    Attribute(String),
    Children(Root<HTMLSourceElement>),
}
enum Resource {
Object,
Url(ServoUrl),
}<|fim▁end|>
| |
<|file_name|>GofmtBear.py<|end_file_name|><|fim▁begin|>from coalib.bearlib.abstractions.Linter import linter
from coalib.bears.requirements.GoRequirement import GoRequirement
@linter(executable='gofmt',
use_stdin=True,
output_format='corrected',
result_message='Formatting can be improved.')
class GofmtBear:
"""
Suggest better formatting options in Go code. Basic checks like alignment,
indentation, and redundant parentheses are provided.
This is done using the ``gofmt`` utility. For more information visit
<https://golang.org/cmd/gofmt/>.
"""
LANGUAGES = {'Go'}
REQUIREMENTS = {GoRequirement(package='golang.org/cmd/gofmt', flag='-u')}<|fim▁hole|> CAN_FIX = {'Formatting'}
ASCIINEMA_URL = 'https://asciinema.org/a/94812'
@staticmethod
def create_arguments(filename, file, config_file):
return ()<|fim▁end|>
|
AUTHORS = {'The coala developers'}
AUTHORS_EMAILS = {'[email protected]'}
LICENSE = 'AGPL-3.0'
|
<|file_name|>mainwindow.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2010-2017 Tuukka Turto
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Module for main window related functionality
"""
import PyQt4.QtGui
from herculeum.ui.controllers import EndScreenController, StartGameController
from herculeum.ui.gui.endscreen import EndScreen
from herculeum.ui.gui.eventdisplay import EventMessageDockWidget
from herculeum.ui.gui.map import PlayMapWindow
from herculeum.ui.gui.menu import MenuDialog
from herculeum.ui.gui.startgame import StartGameWidget
from PyQt4.QtCore import QFile, Qt
from PyQt4.QtGui import (QAction, QApplication, QCursor, QDialog, QIcon,
QMainWindow, QPixmap, QSplashScreen)
class QtUserInterface():
    """
    Class for Qt User Interface

    Owns the QApplication and drives start-up: splash screen first,
    then the main window and the Qt event loop.

    .. versionadded:: 0.9
    """
    def __init__(self, application):
        """
        Default constructor

        :param application: top-level application object providing config,
            generators and the surface manager
        """
        super().__init__()

        self.application = application
        self.splash_screen = None

        # QApplication must exist before any widget is created.
        self.qt_app = QApplication([])
        # self.qt_app.setOverrideCursor(QCursor(Qt.BlankCursor))

    def show_splash_screen(self):
        """
        Show splash screen

        Also loads the application-wide stylesheet from the Qt resource
        file before any window is constructed.
        """
        file = QFile(':herculeum.qss')
        file.open(QFile.ReadOnly)
        styleSheet = str(file.readAll().data(), 'ascii')
        self.qt_app.setStyleSheet(styleSheet)

        pixmap = QPixmap(':splash.png')
        self.splash_screen = QSplashScreen(pixmap)
        self.splash_screen.show()

    def show_main_window(self):
        """
        Show main window

        Builds the frameless main window, dismisses the splash screen,
        opens the new-game dialog and enters the Qt event loop (blocks
        until the application quits).
        """
        main_window = MainWindow(self.application,
                                 self.application.surface_manager,
                                 self.qt_app,
                                 None,
                                 Qt.FramelessWindowHint,
                                 StartGameController(self.application.level_generator_factory,
                                                     self.application.creature_generator,
                                                     self.application.item_generator,
                                                     self.application.config.start_level))

        self.splash_screen.finish(main_window)
        main_window.show_new_game()

        self.qt_app.exec_()
class MainWindow(QMainWindow):
"""
Class for displaying main window
.. versionadded:: 0.5
"""
    def __init__(self, application, surface_manager, qt_app, parent, flags,
                 controller):
        """
        Default constructor

        :param application: top-level application object
        :param surface_manager: manager providing graphics surfaces
        :param qt_app: the running QApplication instance
        :param parent: parent widget (may be None)
        :param flags: Qt window flags
        :param controller: StartGameController used to set up a new game
        """
        super().__init__(parent, flags)

        self.application = application
        self.surface_manager = surface_manager
        self.qt_app = qt_app
        self.controller = controller

        self.__set_layout()
    def __set_layout(self):
        # Builds window chrome: actions, the central play-map widget and
        # window geometry/title.
        # NOTE(review): exit_action, inventory_action and character_action
        # are created but never added to a menu or toolbar here, and
        # character_action has no triggered handler — confirm whether
        # these are intentionally unused.
        exit_action = QAction(QIcon(':exit-game.png'),
                              '&Quit',
                              self)

        exit_action.setShortcut('Ctrl+Q')
        exit_action.setStatusTip('Quit game')
        exit_action.triggered.connect(PyQt4.QtGui.qApp.quit)

        inventory_action = QAction(QIcon(':inventory.png'),
                                   'Inventory',
                                   self)
        inventory_action.setShortcut('Ctrl+I')
        inventory_action.setStatusTip('Show inventory')
        inventory_action.triggered.connect(self.__show_menu)

        character_action = QAction(QIcon(':character.png'),
                                   'Character',
                                   self)
        character_action.setShortcut('Ctrl+C')
        character_action.setStatusTip('Show character')

        # Central widget: the playing-field map view.
        self.map_window = PlayMapWindow(parent=None,
                                        model=self.application.world,
                                        surface_manager=self.surface_manager,
                                        action_factory=self.application.action_factory,
                                        rng=self.application.rng,
                                        rules_engine=self.application.rules_engine,
                                        configuration=self.application.config)
        self.setCentralWidget(self.map_window)

        self.map_window.MenuRequested.connect(self.__show_menu)
        self.map_window.EndScreenRequested.connect(self.__show_end_screen)

        self.setGeometry(50, 50, 800, 600)
        self.setWindowTitle('Herculeum')
        self.setWindowIcon(QIcon(':rune-stone.png'))
        self.showMaximized()
    def show_new_game(self):
        """
        Show new game dialog

        Opens the character selection dialog; if the player accepts,
        sets up the world via the controller, wires the player into the
        hit-point/spirit displays and shows the intro text and map.
        If the dialog is cancelled, nothing happens.
        """
        app = self.application

        start_dialog = StartGameWidget(generator=app.player_generator,
                                       config=self.application.config.controls,
                                       parent=self,
                                       application=self.application,
                                       surface_manager=self.surface_manager,
                                       flags=Qt.Dialog | Qt.CustomizeWindowHint)

        result = start_dialog.exec_()

        if result == QDialog.Accepted:
            player = start_dialog.player_character

            intro_text = self.controller.setup_world(self.application.world,
                                                     player)

            player.register_for_updates(self.map_window.hit_points_widget)
            self.map_window.hit_points_widget.show_hit_points(player)
            self.map_window.hit_points_widget.show_spirit_points(player)
            self.map_window.message_widget.text_edit.setText(intro_text)

            self.__show_map_window()
def __show_map_window(self):
"""
Show map window
"""<|fim▁hole|> def __show_message_window(self, character):
"""
Show message display
:param character: character which events to display
:type character: Character
"""
messages_display = EventMessageDockWidget(self, character)
self.addDockWidget(Qt.BottomDockWidgetArea,
messages_display)
def __show_menu(self):
"""
Show menu
"""
menu_dialog = MenuDialog(self.surface_manager,
self.application.world.player,
self.application.action_factory,
self.application.config.controls,
self,
Qt.Dialog | Qt.CustomizeWindowHint)
menu_dialog.exec_()
def __show_end_screen(self):
"""
Show end screen
.. versionadded:: 0.8
"""
end_screen = EndScreen(self.application.world,
self.application.config.controls,
self,
Qt.Dialog | Qt.CustomizeWindowHint,
controller=EndScreenController())
end_screen.exec_()
self.qt_app.quit()<|fim▁end|>
|
self.map_window.construct_scene()
|
<|file_name|>config_test.go<|end_file_name|><|fim▁begin|>package apgo_test
import (
"testing"
"github.com/igkuz/apgo"
)
func TestNewConfig(t *testing.T) {
expectedDbName := "sgap_test"
expectedDbUser := "root"
expectedDbPass := "123"
expectedDbDialect := "mysql"
config := apgo.NewConfig()
if config.DB["name"] != expectedDbName {<|fim▁hole|> t.Error("Wrong DB user was configured")
}
if config.DB["password"] != expectedDbPass {
t.Error("Wrong DB password was configured")
}
if config.DB["dialect"] != expectedDbDialect {
t.Error("Wrong DB dialect was configured")
}
}<|fim▁end|>
|
t.Error("Wrong DB name was configured")
}
if config.DB["user"] != expectedDbUser {
|
<|file_name|>as_ref.rs<|end_file_name|><|fim▁begin|>#![feature(core)]
extern crate core;
#[cfg(test)]<|fim▁hole|> // /// Converts the array to immutable slice
// fn as_slice(&self) -> &[T];
// /// Converts the array to mutable slice
// fn as_mut_slice(&mut self) -> &mut [T];
// }
// macro_rules! array_impls {
// ($($N:expr)+) => {
// $(
// #[unstable(feature = "core")]
// impl<T> FixedSizeArray<T> for [T; $N] {
// #[inline]
// fn as_slice(&self) -> &[T] {
// &self[..]
// }
// #[inline]
// fn as_mut_slice(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsRef<[T]> for [T; $N] {
// #[inline]
// fn as_ref(&self) -> &[T] {
// &self[..]
// }
// }
//
// #[unstable(feature = "array_as_ref",
// reason = "should ideally be implemented for all fixed-sized arrays")]
// impl<T> AsMut<[T]> for [T; $N] {
// #[inline]
// fn as_mut(&mut self) -> &mut [T] {
// &mut self[..]
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Copy> Clone for [T; $N] {
// fn clone(&self) -> [T; $N] {
// *self
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: Hash> Hash for [T; $N] {
// fn hash<H: hash::Hasher>(&self, state: &mut H) {
// Hash::hash(&self[..], state)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T: fmt::Debug> fmt::Debug for [T; $N] {
// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// fmt::Debug::fmt(&&self[..], f)
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a [T; $N] {
// type Item = &'a T;
// type IntoIter = Iter<'a, T>;
//
// fn into_iter(self) -> Iter<'a, T> {
// self.iter()
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<'a, T> IntoIterator for &'a mut [T; $N] {
// type Item = &'a mut T;
// type IntoIter = IterMut<'a, T>;
//
// fn into_iter(self) -> IterMut<'a, T> {
// self.iter_mut()
// }
// }
//
// // NOTE: some less important impls are omitted to reduce code bloat
// __impl_slice_eq1! { [A; $N], [B; $N] }
// __impl_slice_eq2! { [A; $N], [B] }
// __impl_slice_eq2! { [A; $N], &'b [B] }
// __impl_slice_eq2! { [A; $N], &'b mut [B] }
// // __impl_slice_eq2! { [A; $N], &'b [B; $N] }
// // __impl_slice_eq2! { [A; $N], &'b mut [B; $N] }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Eq> Eq for [T; $N] { }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:PartialOrd> PartialOrd for [T; $N] {
// #[inline]
// fn partial_cmp(&self, other: &[T; $N]) -> Option<Ordering> {
// PartialOrd::partial_cmp(&&self[..], &&other[..])
// }
// #[inline]
// fn lt(&self, other: &[T; $N]) -> bool {
// PartialOrd::lt(&&self[..], &&other[..])
// }
// #[inline]
// fn le(&self, other: &[T; $N]) -> bool {
// PartialOrd::le(&&self[..], &&other[..])
// }
// #[inline]
// fn ge(&self, other: &[T; $N]) -> bool {
// PartialOrd::ge(&&self[..], &&other[..])
// }
// #[inline]
// fn gt(&self, other: &[T; $N]) -> bool {
// PartialOrd::gt(&&self[..], &&other[..])
// }
// }
//
// #[stable(feature = "rust1", since = "1.0.0")]
// impl<T:Ord> Ord for [T; $N] {
// #[inline]
// fn cmp(&self, other: &[T; $N]) -> Ordering {
// Ord::cmp(&&self[..], &&other[..])
// }
// }
// )+
// }
// }
// array_impls! {
// 0 1 2 3 4 5 6 7 8 9
// 10 11 12 13 14 15 16 17 18 19
// 20 21 22 23 24 25 26 27 28 29
// 30 31 32
// }
type T = i32;
#[test]
fn as_ref_test1() {
let array: [T; 2] = [
0, 1
];
let as_ref: &[T] = array.as_ref();
for i in 0..2 {
assert_eq!(array[i], as_ref[i]);
}
}
}<|fim▁end|>
|
mod tests {
use core::convert::AsRef;
// pub trait FixedSizeArray<T> {
|
<|file_name|>widget.configmanager.js<|end_file_name|><|fim▁begin|>/*
* Copyright (c) 2016 Juniper Networks, Inc. All rights reserved.
*/<|fim▁hole|> 'confignode-modelcfg','controlnode-modelcfg','vrouter-modelcfg',
'databasenode-modelcfg','analyticsnode-modelcfg','monitor-infra-modelcfg',
'security-dashboard-modelcfg',
'monitor-infra-viewcfg','confignode-viewcfg', 'databasenode-viewcfg',
'vrouter-viewcfg', 'security-dashboard-viewcfg', 'alarms-viewconfig'
], function(
_,ControlNodeWidgetCfg, VRouterWidgetCfg, DBNodeWidgetCfg,
AnalyticsNodeWidgetCfg, CfgNodeWidgetCfg,MonitorInfraWidgetCfg,
SecurityDashboardWidgetCfg,
CfgNodeModelCfg,ControlNodeModelCfg,VRouterModelCfg,DatabaseNodeModelCfg,
AnaltyicsNodeModelCfg,MonitorInfraModelCfg,SecurityDashboardModelCfg,
MonitorInfraViewCfg,CfgNodeViewCfg, DBNodeViewCfg, VRouterViewCfg,
SecurityDashboardViewConfig, AlarmsViewConfig, SecurityDashboardViewCfg ) {
var widgetCfgManager = function() {
var self = this;
var widgetCfgMap = {},
widgetViewCfgMap = {},
widgetModelCfgMap = {};
//Populate the available widget config maps
$.extend(widgetCfgMap, ControlNodeWidgetCfg, VRouterWidgetCfg,
DBNodeWidgetCfg, AnalyticsNodeWidgetCfg,
CfgNodeWidgetCfg,MonitorInfraWidgetCfg, SecurityDashboardWidgetCfg);
//Populate the available model config maps
$.extend(widgetModelCfgMap, CfgNodeModelCfg,ControlNodeModelCfg,VRouterModelCfg,
DatabaseNodeModelCfg,AnaltyicsNodeModelCfg,MonitorInfraModelCfg,
SecurityDashboardModelCfg);
$.extend(widgetViewCfgMap, MonitorInfraViewCfg, CfgNodeViewCfg,
DBNodeViewCfg, VRouterViewCfg, SecurityDashboardViewConfig, AlarmsViewConfig,
SecurityDashboardViewCfg);
//,ControlNodeViewCfg,VRouterViewCfg,DatabaseNodeViewCfg,AnaltyicsNodeViewCfg,);
self.get = function(widgetId,overrideCfg,i) {
var widgetCfg = _.isFunction(widgetCfgMap[widgetId]) ? widgetCfgMap[widgetId](overrideCfg,i) : widgetCfgMap[widgetId];
if (widgetCfg == null) {
widgetCfg = _.isFunction(widgetViewCfgMap[widgetId]) ? widgetViewCfgMap[widgetId](overrideCfg) : widgetViewCfgMap[widgetId];
}
var modelCfg = {},viewCfg = {},baseModelCfg;
if(widgetCfg['baseModel'] != null) {
baseModelCfg = widgetModelCfgMap[widgetCfg['baseModel']];
if(widgetCfg['modelCfg'] != null) {
$.extend(true,modelCfg,baseModelCfg,widgetCfg['modelCfg'])
} else {
modelCfg = baseModelCfg;
}
if(_.result(baseModelCfg,'type','')) {
widgetCfg['tag'] = baseModelCfg['type'];
}
widgetCfg['modelCfg'] = modelCfg;
}
if(_.result(widgetCfg,'modelCfg.type','')) {
widgetCfg['tag'] = _.result(widgetCfg,'modelCfg.type');
}
if(widgetCfg['baseView'] != null) {
baseViewCfg = widgetViewCfgMap[widgetCfg['baseView']];
if(widgetCfg['viewCfg'] != null) {
$.extend(true,viewCfg,baseViewCfg,widgetCfg['viewCfg'])
} else {
viewCfg= baseViewCfg;
}
widgetCfg['viewCfg'] = viewCfg;
}
return widgetCfg;
}
//Returns list of available widgets
self.getWidgetList = function() {
// return _.keys(widgetCfgMap);
var widgetMap = _.map(_.keys(widgetCfgMap),function(widgetId) {
return {
key: widgetId,
value: self.get(widgetId),
tag: self.get(widgetId)['tag']
}
});
widgetMap = _.groupBy(widgetMap,function(d) {
return d.tag;
});
//Pick yAxisLabel if exists else return widgetId
return _.map(widgetMap,function(value,key) {
return {
text: key,
children: _.map(value, function(widgetCfg) {
return {
id:widgetCfg['key'],
text:_.result(widgetCfg['value'],'viewCfg.viewConfig.chartOptions.yAxisLabel',widgetCfg['key'])
}
})
// val:_.result(widgetCfg['value'],'viewCfg.viewConfig.chartOptions.yAxisLabel',widgetCfg['key'])
}
});
}
self.modelInstMap = {};
}
return new widgetCfgManager();
});<|fim▁end|>
|
define([ 'lodash',
'controlnode-widgetcfg', 'vrouter-widgetcfg','databasenode-widgetcfg',
'analyticsnode-widgetcfg','confignode-widgetcfg','monitor-infra-widgetcfg',
'security-dashboard-widgetcfg',
|
<|file_name|>expanded-cfg.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed<|fim▁hole|>// compile-pass
#![feature(custom_attribute)]
macro_rules! mac {
{} => {
#[cfg(attr)]
mod m {
#[lang_item]
fn f() {}
#[cfg_attr(target_thread_local, custom)]
fn g() {}
}
#[cfg(attr)]
unconfigured_invocation!();
}
}
mac! {}
fn main() {}<|fim▁end|>
|
// except according to those terms.
// skip-codegen
|
<|file_name|>ajv-bsontype-tests.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>ajvBsontype(ajv);<|fim▁end|>
|
import Ajv from 'ajv';
import ajvBsontype = require('ajv-bsontype');
const ajv = new Ajv();
|
<|file_name|>sites.py<|end_file_name|><|fim▁begin|>import sys
from functools import update_wrapper
from future.utils import iteritems
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.base import ModelBase
from django.utils import six
from django.views.decorators.cache import never_cache
from django.template.engine import Engine
import inspect
if six.PY2 and sys.getdefaultencoding() == 'ascii':
import imp
imp.reload(sys)
sys.setdefaultencoding("utf-8")
class AlreadyRegistered(Exception):
pass
class NotRegistered(Exception):
pass
class MergeAdminMetaclass(type):
def __new__(cls, name, bases, attrs):
return type.__new__(cls, str(name), bases, attrs)
class AdminSite(object):
def __init__(self, name='xadmin'):
self.name = name
self.app_name = 'xadmin'
self._registry = {} # model_class class -> admin_class class
self._registry_avs = {} # admin_view_class class -> admin_class class
self._registry_settings = {} # settings name -> admin_class class
self._registry_views = []
# url instance contains (path, admin_view class, name)
self._registry_modelviews = []
# url instance contains (path, admin_view class, name)
self._registry_plugins = {} # view_class class -> plugin_class class
self._admin_view_cache = {}<|fim▁hole|> self.model_admins_order = 0
def copy_registry(self):
import copy
return {
'models': copy.copy(self._registry),
'avs': copy.copy(self._registry_avs),
'views': copy.copy(self._registry_views),
'settings': copy.copy(self._registry_settings),
'modelviews': copy.copy(self._registry_modelviews),
'plugins': copy.copy(self._registry_plugins),
}
def restore_registry(self, data):
self._registry = data['models']
self._registry_avs = data['avs']
self._registry_views = data['views']
self._registry_settings = data['settings']
self._registry_modelviews = data['modelviews']
self._registry_plugins = data['plugins']
def register_modelview(self, path, admin_view_class, name):
from xadmin.views.base import BaseAdminView
if issubclass(admin_view_class, BaseAdminView):
self._registry_modelviews.append((path, admin_view_class, name))
else:
raise ImproperlyConfigured(u'The registered view class %s isn\'t subclass of %s' %
(admin_view_class.__name__, BaseAdminView.__name__))
def register_view(self, path, admin_view_class, name):
self._registry_views.append((path, admin_view_class, name))
def register_plugin(self, plugin_class, admin_view_class):
from xadmin.views.base import BaseAdminPlugin
if issubclass(plugin_class, BaseAdminPlugin):
self._registry_plugins.setdefault(
admin_view_class, []).append(plugin_class)
else:
raise ImproperlyConfigured(u'The registered plugin class %s isn\'t subclass of %s' %
(plugin_class.__name__, BaseAdminPlugin.__name__))
def register_settings(self, name, admin_class):
self._registry_settings[name.lower()] = admin_class
def register(self, model_or_iterable, admin_class=object, **options):
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, ModelBase) or issubclass(model_or_iterable, BaseAdminView):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model._meta.abstract:
raise ImproperlyConfigured('The model %s is abstract, so it '
'cannot be registered with admin.' % model.__name__)
if model in self._registry:
raise AlreadyRegistered(
'The model %s is already registered' % model.__name__)
# If we got **options then dynamically construct a subclass of
# admin_class with those **options.
if options:
# For reasons I don't quite understand, without a __module__
# the created class appears to "live" in the wrong place,
# which causes issues later on.
options['__module__'] = __name__
admin_class = type(str("%s%sAdmin" % (model._meta.app_label, model._meta.model_name)), (admin_class,), options or {})
admin_class.model = model
admin_class.order = self.model_admins_order
self.model_admins_order += 1
self._registry[model] = admin_class
else:
if model in self._registry_avs:
raise AlreadyRegistered('The admin_view_class %s is already registered' % model.__name__)
if options:
options['__module__'] = __name__
admin_class = type(str(
"%sAdmin" % model.__name__), (admin_class,), options)
# Instantiate the admin class to save in the registry
self._registry_avs[model] = admin_class
def unregister(self, model_or_iterable):
"""
Unregisters the given model(s).
If a model isn't already registered, this will raise NotRegistered.
"""
from xadmin.views.base import BaseAdminView
if isinstance(model_or_iterable, (ModelBase, BaseAdminView)):
model_or_iterable = [model_or_iterable]
for model in model_or_iterable:
if isinstance(model, ModelBase):
if model not in self._registry:
raise NotRegistered(
'The model %s is not registered' % model.__name__)
del self._registry[model]
else:
if model not in self._registry_avs:
raise NotRegistered('The admin_view_class %s is not registered' % model.__name__)
del self._registry_avs[model]
def set_loginview(self, login_view):
self.login_view = login_view
def has_permission(self, request):
"""
Returns True if the given HttpRequest has permission to view
*at least one* page in the admin site.
"""
return request.user.is_active and request.user.is_staff
def check_dependencies(self):
"""
Check that all things needed to run the admin have been correctly installed.
The default implementation checks that LogEntry, ContentType and the
auth context processor are installed.
"""
from django.contrib.contenttypes.models import ContentType
if not ContentType._meta.installed:
raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
"your INSTALLED_APPS setting in order to use the admin application.")
default_template_engine = Engine.get_default()
if not ('django.contrib.auth.context_processors.auth' in default_template_engine.context_processors or
'django.core.context_processors.auth' in default_template_engine.context_processors):
raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
"in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")
def admin_view(self, view, cacheable=False):
"""
Decorator to create an admin view attached to this ``AdminSite``. This
wraps the view and provides permission checking by calling
``self.has_permission``.
You'll want to use this from within ``AdminSite.get_urls()``:
class MyAdminSite(AdminSite):
def get_urls(self):
from django.conf.urls import url
urls = super(MyAdminSite, self).get_urls()
urls += [
url(r'^my_view/$', self.admin_view(some_view))
]
return urls
By default, admin_views are marked non-cacheable using the
``never_cache`` decorator. If the view can be safely cached, set
cacheable=True.
"""
def inner(request, *args, **kwargs):
if not self.has_permission(request) and getattr(view, 'need_site_permission', True):
return self.create_admin_view(self.login_view)(request, *args, **kwargs)
return view(request, *args, **kwargs)
if not cacheable:
inner = never_cache(inner)
return update_wrapper(inner, view)
def _get_merge_attrs(self, option_class, plugin_class):
return dict([(name, getattr(option_class, name)) for name in dir(option_class)
if name[0] != '_' and not callable(getattr(option_class, name)) and hasattr(plugin_class, name)])
def _get_settings_class(self, admin_view_class):
name = admin_view_class.__name__.lower()
if name in self._registry_settings:
return self._registry_settings[name]
elif name.endswith('admin') and name[0:-5] in self._registry_settings:
return self._registry_settings[name[0:-5]]
elif name.endswith('adminview') and name[0:-9] in self._registry_settings:
return self._registry_settings[name[0:-9]]
return None
def _create_plugin(self, option_classes):
def merge_class(plugin_class):
if option_classes:
attrs = {}
bases = [plugin_class]
for oc in option_classes:
attrs.update(self._get_merge_attrs(oc, plugin_class))
meta_class = getattr(oc, plugin_class.__name__, getattr(oc, plugin_class.__name__.replace('Plugin', ''), None))
if meta_class:
bases.insert(0, meta_class)
if attrs:
plugin_class = MergeAdminMetaclass(
'%s%s' % (''.join([oc.__name__ for oc in option_classes]), plugin_class.__name__),
tuple(bases), attrs)
return plugin_class
return merge_class
def get_plugins(self, admin_view_class, *option_classes):
from xadmin.views import BaseAdminView
plugins = []
opts = [oc for oc in option_classes if oc]
for klass in admin_view_class.mro():
if klass == BaseAdminView or issubclass(klass, BaseAdminView):
merge_opts = []
reg_class = self._registry_avs.get(klass)
if reg_class:
merge_opts.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merge_opts.append(settings_class)
merge_opts.extend(opts)
ps = self._registry_plugins.get(klass, [])
plugins.extend(map(self._create_plugin(
merge_opts), ps) if merge_opts else ps)
return plugins
def get_view_class(self, view_class, option_class=None, **opts):
merges = [option_class] if option_class else []
for klass in view_class.mro():
reg_class = self._registry_avs.get(klass)
if reg_class:
merges.append(reg_class)
settings_class = self._get_settings_class(klass)
if settings_class:
merges.append(settings_class)
merges.append(klass)
new_class_name = ''.join([c.__name__ for c in merges])
if new_class_name not in self._admin_view_cache:
plugins = self.get_plugins(view_class, option_class)
self._admin_view_cache[new_class_name] = MergeAdminMetaclass(
new_class_name, tuple(merges),
dict({'plugin_classes': plugins, 'admin_site': self}, **opts))
return self._admin_view_cache[new_class_name]
def create_admin_view(self, admin_view_class):
return self.get_view_class(admin_view_class).as_view()
def create_model_admin_view(self, admin_view_class, model, option_class):
return self.get_view_class(admin_view_class, option_class).as_view()
def get_urls(self):
from django.conf.urls import url, include
from xadmin.views.base import BaseAdminView
if settings.DEBUG:
self.check_dependencies()
def wrap(view, cacheable=False):
def wrapper(*args, **kwargs):
return self.admin_view(view, cacheable)(*args, **kwargs)
return update_wrapper(wrapper, view)
# Admin-site-wide views.
urlpatterns = [
url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n')
]
# Registed admin views
# inspect[isclass]: Only checks if the object is a class. With it lets you create an custom view that
# inherits from multiple views and have more of a metaclass.
urlpatterns += [
url(
path,
wrap(self.create_admin_view(clz_or_func))
if inspect.isclass(clz_or_func) and issubclass(clz_or_func, BaseAdminView)
else include(clz_or_func(self)),
name=name
)
for path, clz_or_func, name in self._registry_views
]
# Add in each model's views.
for model, admin_class in iteritems(self._registry):
view_urls = [
url(
path,
wrap(self.create_model_admin_view(clz, model, admin_class)),
name=name % (model._meta.app_label, model._meta.model_name)
)
for path, clz, name in self._registry_modelviews
]
urlpatterns += [
url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(view_urls))
]
return urlpatterns
@property
def urls(self):
return self.get_urls(), self.name, self.app_name
def i18n_javascript(self, request):
"""
Displays the i18n JavaScript that the Django admin requires.
This takes into account the USE_I18N setting. If it's set to False, the
generated JavaScript will be leaner and faster.
"""
if settings.USE_I18N:
from django.views.i18n import javascript_catalog
else:
from django.views.i18n import null_javascript_catalog as javascript_catalog
return javascript_catalog(request, packages=['django.conf', 'xadmin'])
# This global object represents the default admin site, for the common case.
# You can instantiate AdminSite in your own code to create a custom admin site.
site = AdminSite()
def register(models, **kwargs):
def _model_admin_wrapper(admin_class):
site.register(models, admin_class)
return _model_admin_wrapper<|fim▁end|>
|
# self.check_dependencies()
|
<|file_name|>block_test_util.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package tests
import (
"bytes"
"encoding/hex"
"fmt"
"io"
"math/big"
"path/filepath"
"runtime"
"strconv"
"strings"
"time"
"github.com/soilcurrency/go-ethereum/accounts"
"github.com/soilcurrency/go-ethereum/common"
"github.com/soilcurrency/go-ethereum/core"
"github.com/soilcurrency/go-ethereum/core/state"
"github.com/soilcurrency/go-ethereum/core/types"
"github.com/soilcurrency/go-ethereum/crypto"
"github.com/soilcurrency/go-ethereum/eth"
"github.com/soilcurrency/go-ethereum/ethdb"
"github.com/soilcurrency/go-ethereum/logger/glog"
"github.com/soilcurrency/go-ethereum/rlp"
)
// Block Test JSON Format
type BlockTest struct {
Genesis *types.Block
Json *btJSON
preAccounts map[string]btAccount
postAccounts map[string]btAccount
lastblockhash string
}
type btJSON struct {
Blocks []btBlock
GenesisBlockHeader btHeader
Pre map[string]btAccount
PostState map[string]btAccount
Lastblockhash string
}
type btBlock struct {
BlockHeader *btHeader
Rlp string
Transactions []btTransaction
UncleHeaders []*btHeader
}
type btAccount struct {
Balance string
Code string
Nonce string
Storage map[string]string
PrivateKey string
}
type btHeader struct {
Bloom string
Coinbase string
MixHash string
Nonce string
Number string
Hash string
ParentHash string
ReceiptTrie string
SeedHash string
StateRoot string
TransactionsTrie string
UncleHash string
ExtraData string
Difficulty string
GasLimit string
GasUsed string
Timestamp string
}
type btTransaction struct {
Data string
GasLimit string
GasPrice string
Nonce string
R string
S string
To string
V string
Value string
}
// RunBlockTestWithReader decodes block tests from r and runs every test
// whose name is not listed in skipTests.
func RunBlockTestWithReader(r io.Reader, skipTests []string) error {
	decoded := make(map[string]*btJSON)
	if err := readJson(r, &decoded); err != nil {
		return err
	}
	converted, err := convertBlockTests(decoded)
	if err != nil {
		return err
	}
	return runBlockTests(converted, skipTests)
}
// RunBlockTest loads block tests from the JSON file at path `file` and
// runs every test whose name is not listed in skipTests.
func RunBlockTest(file string, skipTests []string) error {
	decoded := make(map[string]*btJSON)
	if err := readJsonFile(file, &decoded); err != nil {
		return err
	}
	converted, err := convertBlockTests(decoded)
	if err != nil {
		return err
	}
	return runBlockTests(converted, skipTests)
}
func runBlockTests(bt map[string]*BlockTest, skipTests []string) error {
skipTest := make(map[string]bool, len(skipTests))
for _, name := range skipTests {
skipTest[name] = true
}
for name, test := range bt {
// if the test should be skipped, return
if skipTest[name] {
glog.Infoln("Skipping block test", name)
continue
}
// test the block
if err := runBlockTest(test); err != nil {
return fmt.Errorf("%s: %v", name, err)
}
glog.Infoln("Block test passed: ", name)
}
return nil
}
func runBlockTest(test *BlockTest) error {
ks := crypto.NewKeyStorePassphrase(filepath.Join(common.DefaultDataDir(), "keystore"), crypto.StandardScryptN, crypto.StandardScryptP)
am := accounts.NewManager(ks)
db, _ := ethdb.NewMemDatabase()
cfg := ð.Config{
DataDir: common.DefaultDataDir(),
Verbosity: 5,
Etherbase: common.Address{},
AccountManager: am,
NewDB: func(path string) (ethdb.Database, error) { return db, nil },
}
cfg.GenesisBlock = test.Genesis
// import pre accounts & construct test genesis block & state root
_, err := test.InsertPreState(db, am)
if err != nil {
return fmt.Errorf("InsertPreState: %v", err)
}
ethereum, err := eth.New(cfg)
if err != nil {
return err
}
err = ethereum.Start()
if err != nil {
return err
}
cm := ethereum.BlockChain()
validBlocks, err := test.TryBlocksInsert(cm)
if err != nil {
return err
}
lastblockhash := common.HexToHash(test.lastblockhash)
cmlast := cm.LastBlockHash()
if lastblockhash != cmlast {
return fmt.Errorf("lastblockhash validation mismatch: want: %x, have: %x", lastblockhash, cmlast)
}
newDB, err := cm.State()
if err != nil {
return err
}
if err = test.ValidatePostState(newDB); err != nil {
return fmt.Errorf("post state validation failed: %v", err)
}
return test.ValidateImportedHeaders(cm, validBlocks)
}
// InsertPreState populates the given database with the genesis accounts
// defined by the test, imports/unlocks any provided private keys, and
// verifies the committed state root against the test's genesis block.
func (t *BlockTest) InsertPreState(db ethdb.Database, am *accounts.Manager) (*state.StateDB, error) {
	statedb, err := state.New(common.Hash{}, db)
	if err != nil {
		return nil, err
	}
	for addrString, acct := range t.preAccounts {
		addr, err := hex.DecodeString(addrString)
		if err != nil {
			return nil, err
		}
		code, err := hex.DecodeString(strings.TrimPrefix(acct.Code, "0x"))
		if err != nil {
			return nil, err
		}
		balance, ok := new(big.Int).SetString(acct.Balance, 0)
		if !ok {
			// was: `return nil, err` with a stale nil err, which made the
			// function report success as (nil, nil) on a bad balance.
			return nil, fmt.Errorf("invalid balance %q for account %s", acct.Balance, addrString)
		}
		nonce, err := strconv.ParseUint(prepInt(16, acct.Nonce), 16, 64)
		if err != nil {
			return nil, err
		}
		if acct.PrivateKey != "" {
			privkey, err := hex.DecodeString(strings.TrimPrefix(acct.PrivateKey, "0x"))
			if err != nil {
				// was: this decode error was silently overwritten below.
				return nil, err
			}
			if err := crypto.ImportBlockTestKey(privkey); err != nil {
				return nil, err
			}
			if err := am.TimedUnlock(common.BytesToAddress(addr), "", 999999*time.Second); err != nil {
				return nil, err
			}
		}
		obj := statedb.CreateAccount(common.HexToAddress(addrString))
		obj.SetCode(code)
		obj.SetBalance(balance)
		obj.SetNonce(nonce)
		for k, v := range acct.Storage {
			statedb.SetState(common.HexToAddress(addrString), common.HexToHash(k), common.HexToHash(v))
		}
	}
	root, err := statedb.Commit()
	if err != nil {
		return nil, fmt.Errorf("error writing state: %v", err)
	}
	if t.Genesis.Root() != root {
		return nil, fmt.Errorf("computed state root does not match genesis block: genesis=%x computed=%x", t.Genesis.Root().Bytes()[:4], root.Bytes()[:4])
	}
	return statedb, nil
}
/* See https://github.com/ethereum/tests/wiki/Blockchain-Tests-II
Whether a block is valid or not is a bit subtle, it's defined by presence of
blockHeader, transactions and uncleHeaders fields. If they are missing, the block is
invalid and we must verify that we do not accept it.
Since some tests mix valid and invalid blocks we need to check this for every block.
If a block is invalid it does not necessarily fail the test, if it's invalidness is
expected we are expected to ignore it and continue processing and then validate the
post state.
*/
func (t *BlockTest) TryBlocksInsert(blockchain *core.BlockChain) ([]btBlock, error) {
validBlocks := make([]btBlock, 0)
// insert the test blocks, which will execute all transactions
for _, b := range t.Json.Blocks {
cb, err := mustConvertBlock(b)
if err != nil {
if b.BlockHeader == nil {
continue // OK - block is supposed to be invalid, continue with next block
} else {
return nil, fmt.Errorf("Block RLP decoding failed when expected to succeed: %v", err)
}
}
// RLP decoding worked, try to insert into chain:
_, err = blockchain.InsertChain(types.Blocks{cb})
if err != nil {
if b.BlockHeader == nil {
continue // OK - block is supposed to be invalid, continue with next block
} else {
return nil, fmt.Errorf("Block insertion into chain failed: %v", err)
}
}
if b.BlockHeader == nil {
return nil, fmt.Errorf("Block insertion should have failed")
}
// validate RLP decoding by checking all values against test file JSON
if err = validateHeader(b.BlockHeader, cb.Header()); err != nil {
return nil, fmt.Errorf("Deserialised block header validation failed: %v", err)
}
validBlocks = append(validBlocks, b)
}
return validBlocks, nil
}
func validateHeader(h *btHeader, h2 *types.Header) error {
expectedBloom := mustConvertBytes(h.Bloom)
if !bytes.Equal(expectedBloom, h2.Bloom.Bytes()) {
return fmt.Errorf("Bloom: want: %x have: %x", expectedBloom, h2.Bloom.Bytes())
}
expectedCoinbase := mustConvertBytes(h.Coinbase)
if !bytes.Equal(expectedCoinbase, h2.Coinbase.Bytes()) {
return fmt.Errorf("Coinbase: want: %x have: %x", expectedCoinbase, h2.Coinbase.Bytes())
}
expectedMixHashBytes := mustConvertBytes(h.MixHash)
if !bytes.Equal(expectedMixHashBytes, h2.MixDigest.Bytes()) {
return fmt.Errorf("MixHash: want: %x have: %x", expectedMixHashBytes, h2.MixDigest.Bytes())
}
expectedNonce := mustConvertBytes(h.Nonce)
if !bytes.Equal(expectedNonce, h2.Nonce[:]) {
return fmt.Errorf("Nonce: want: %x have: %x", expectedNonce, h2.Nonce)
}
expectedNumber := mustConvertBigInt(h.Number, 16)
if expectedNumber.Cmp(h2.Number) != 0 {
return fmt.Errorf("Number: want: %v have: %v", expectedNumber, h2.Number)
}
expectedParentHash := mustConvertBytes(h.ParentHash)
if !bytes.Equal(expectedParentHash, h2.ParentHash.Bytes()) {
return fmt.Errorf("Parent hash: want: %x have: %x", expectedParentHash, h2.ParentHash.Bytes())
}
expectedReceiptHash := mustConvertBytes(h.ReceiptTrie)
if !bytes.Equal(expectedReceiptHash, h2.ReceiptHash.Bytes()) {
return fmt.Errorf("Receipt hash: want: %x have: %x", expectedReceiptHash, h2.ReceiptHash.Bytes())
}
expectedTxHash := mustConvertBytes(h.TransactionsTrie)
if !bytes.Equal(expectedTxHash, h2.TxHash.Bytes()) {
return fmt.Errorf("Tx hash: want: %x have: %x", expectedTxHash, h2.TxHash.Bytes())
}
expectedStateHash := mustConvertBytes(h.StateRoot)
if !bytes.Equal(expectedStateHash, h2.Root.Bytes()) {
return fmt.Errorf("State hash: want: %x have: %x", expectedStateHash, h2.Root.Bytes())
}
expectedUncleHash := mustConvertBytes(h.UncleHash)
if !bytes.Equal(expectedUncleHash, h2.UncleHash.Bytes()) {
return fmt.Errorf("Uncle hash: want: %x have: %x", expectedUncleHash, h2.UncleHash.Bytes())
}
expectedExtraData := mustConvertBytes(h.ExtraData)
if !bytes.Equal(expectedExtraData, h2.Extra) {
return fmt.Errorf("Extra data: want: %x have: %x", expectedExtraData, h2.Extra)
}
expectedDifficulty := mustConvertBigInt(h.Difficulty, 16)
if expectedDifficulty.Cmp(h2.Difficulty) != 0 {
return fmt.Errorf("Difficulty: want: %v have: %v", expectedDifficulty, h2.Difficulty)
}
expectedGasLimit := mustConvertBigInt(h.GasLimit, 16)
if expectedGasLimit.Cmp(h2.GasLimit) != 0 {
return fmt.Errorf("GasLimit: want: %v have: %v", expectedGasLimit, h2.GasLimit)
}
expectedGasUsed := mustConvertBigInt(h.GasUsed, 16)
if expectedGasUsed.Cmp(h2.GasUsed) != 0 {
return fmt.Errorf("GasUsed: want: %v have: %v", expectedGasUsed, h2.GasUsed)
}
expectedTimestamp := mustConvertBigInt(h.Timestamp, 16)
if expectedTimestamp.Cmp(h2.Time) != 0 {
return fmt.Errorf("Timestamp: want: %v have: %v", expectedTimestamp, h2.Time)
}
return nil
}
// ValidatePostState compares the account fields recorded in the test's
// post-state section (code, balance, nonce) against the actual state db.
func (t *BlockTest) ValidatePostState(statedb *state.StateDB) error {
	for addrString, acct := range t.postAccounts {
		addr, err := hex.DecodeString(addrString)
		if err != nil {
			return err
		}
		code, err := hex.DecodeString(strings.TrimPrefix(acct.Code, "0x"))
		if err != nil {
			return err
		}
		balance, ok := new(big.Int).SetString(acct.Balance, 0)
		if !ok {
			// was: `return err` with a stale nil err, which reported
			// success on an unparseable balance.
			return fmt.Errorf("invalid balance %q for account %s", acct.Balance, addrString)
		}
		nonce, err := strconv.ParseUint(prepInt(16, acct.Nonce), 16, 64)
		if err != nil {
			return err
		}
		// address is indirectly verified by the other fields, as it's the db key
		code2 := statedb.GetCode(common.BytesToAddress(addr))
		balance2 := statedb.GetBalance(common.BytesToAddress(addr))
		nonce2 := statedb.GetNonce(common.BytesToAddress(addr))
		if !bytes.Equal(code2, code) {
			return fmt.Errorf("account code mismatch for addr: %s want: %s have: %s", addrString, hex.EncodeToString(code), hex.EncodeToString(code2))
		}
		if balance2.Cmp(balance) != 0 {
			return fmt.Errorf("account balance mismatch for addr: %s, want: %d, have: %d", addrString, balance, balance2)
		}
		if nonce2 != nonce {
			return fmt.Errorf("account nonce mismatch for addr: %s want: %d have: %d", addrString, nonce, nonce2)
		}
	}
	return nil
}
func (test *BlockTest) ValidateImportedHeaders(cm *core.BlockChain, validBlocks []btBlock) error {
// to get constant lookup when verifying block headers by hash (some tests have many blocks)
bmap := make(map[string]btBlock, len(test.Json.Blocks))
for _, b := range validBlocks {
bmap[b.BlockHeader.Hash] = b
}
// iterate over blocks backwards from HEAD and validate imported
// headers vs test file. some tests have reorgs, and we import
// block-by-block, so we can only validate imported headers after
// all blocks have been processed by ChainManager, as they may not
// be part of the longest chain until last block is imported.
for b := cm.CurrentBlock(); b != nil && b.NumberU64() != 0; b = cm.GetBlock(b.Header().ParentHash) {
bHash := common.Bytes2Hex(b.Hash().Bytes()) // hex without 0x prefix
if err := validateHeader(bmap[bHash].BlockHeader, b.Header()); err != nil {
return fmt.Errorf("Imported block header validation failed: %v", err)
}
}
return nil
}
func convertBlockTests(in map[string]*btJSON) (map[string]*BlockTest, error) {
out := make(map[string]*BlockTest)
for name, test := range in {
var err error
if out[name], err = convertBlockTest(test); err != nil {
return out, fmt.Errorf("bad test %q: %v", name, err)
}
}
return out, nil
}
// convertBlockTest converts one decoded JSON test into a BlockTest.
// The mustConvert* helpers panic on malformed input; the deferred
// recover turns any such panic (plus its stack trace) into the
// returned error.
func convertBlockTest(in *btJSON) (out *BlockTest, err error) {
	// the conversion handles errors by catching panics.
	// you might consider this ugly, but the alternative (passing errors)
	// would be much harder to read.
	defer func() {
		if recovered := recover(); recovered != nil {
			buf := make([]byte, 64<<10)
			buf = buf[:runtime.Stack(buf, false)]
			err = fmt.Errorf("%v\n%s", recovered, buf)
		}
	}()
	out = &BlockTest{preAccounts: in.Pre, postAccounts: in.PostState, Json: in, lastblockhash: in.Lastblockhash}
	out.Genesis = mustConvertGenesis(in.GenesisBlockHeader)
	return out, err
}
func mustConvertGenesis(testGenesis btHeader) *types.Block {
hdr := mustConvertHeader(testGenesis)
hdr.Number = big.NewInt(0)
return types.NewBlockWithHeader(hdr)
}
func mustConvertHeader(in btHeader) *types.Header {
// hex decode these fields
header := &types.Header{
//SeedHash: mustConvertBytes(in.SeedHash),
MixDigest: mustConvertHash(in.MixHash),
Bloom: mustConvertBloom(in.Bloom),
ReceiptHash: mustConvertHash(in.ReceiptTrie),
TxHash: mustConvertHash(in.TransactionsTrie),
Root: mustConvertHash(in.StateRoot),
Coinbase: mustConvertAddress(in.Coinbase),
UncleHash: mustConvertHash(in.UncleHash),
ParentHash: mustConvertHash(in.ParentHash),
Extra: mustConvertBytes(in.ExtraData),
GasUsed: mustConvertBigInt(in.GasUsed, 16),
GasLimit: mustConvertBigInt(in.GasLimit, 16),
Difficulty: mustConvertBigInt(in.Difficulty, 16),
Time: mustConvertBigInt(in.Timestamp, 16),
Nonce: types.EncodeNonce(mustConvertUint(in.Nonce, 16)),
}
return header
}
func mustConvertBlock(testBlock btBlock) (*types.Block, error) {
var b types.Block
r := bytes.NewReader(mustConvertBytes(testBlock.Rlp))
err := rlp.Decode(r, &b)
return &b, err
}
// mustConvertBytes decodes a (possibly "0x"-prefixed, possibly
// fixture-mangled) hex string, panicking on invalid input. The literal
// "0x" denotes the empty byte string in the test fixtures.
func mustConvertBytes(in string) []byte {
	if in == "0x" {
		return []byte{}
	}
	h := unfuckFuckedHex(strings.TrimPrefix(in, "0x"))
	out, err := hex.DecodeString(h)
	if err != nil {
		// was: fmt.Errorf("invalid hex: %q: ", h, err) — extra argument
		// with no verb, so the underlying error never appeared in the
		// message (and vet flags the format string).
		panic(fmt.Errorf("invalid hex: %q: %v", h, err))
	}
	return out
}
func mustConvertHash(in string) common.Hash {
out, err := hex.DecodeString(strings.TrimPrefix(in, "0x"))
if err != nil {
panic(fmt.Errorf("invalid hex: %q", in))
}
return common.BytesToHash(out)
}<|fim▁hole|>func mustConvertAddress(in string) common.Address {
out, err := hex.DecodeString(strings.TrimPrefix(in, "0x"))
if err != nil {
panic(fmt.Errorf("invalid hex: %q", in))
}
return common.BytesToAddress(out)
}
func mustConvertBloom(in string) types.Bloom {
out, err := hex.DecodeString(strings.TrimPrefix(in, "0x"))
if err != nil {
panic(fmt.Errorf("invalid hex: %q", in))
}
return types.BytesToBloom(out)
}
func mustConvertBigInt(in string, base int) *big.Int {
in = prepInt(base, in)
out, ok := new(big.Int).SetString(in, base)
if !ok {
panic(fmt.Errorf("invalid integer: %q", in))
}
return out
}
func mustConvertUint(in string, base int) uint64 {
in = prepInt(base, in)
out, err := strconv.ParseUint(in, base, 64)
if err != nil {
panic(fmt.Errorf("invalid integer: %q", in))
}
return out
}
func LoadBlockTests(file string) (map[string]*BlockTest, error) {
btjs := make(map[string]*btJSON)
if err := readJsonFile(file, &btjs); err != nil {
return nil, err
}
return convertBlockTests(btjs)
}
// Nothing to see here, please move along...
func prepInt(base int, s string) string {
if base == 16 {
if strings.HasPrefix(s, "0x") {
s = s[2:]
}
if len(s) == 0 {
s = "00"
}
s = nibbleFix(s)
}
return s
}
// don't ask
func unfuckFuckedHex(almostHex string) string {
return nibbleFix(strings.Replace(almostHex, "v", "", -1))
}
func nibbleFix(s string) string {
if len(s)%2 != 0 {
s = "0" + s
}
return s
}<|fim▁end|>
| |
<|file_name|>number.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Immutable numbers.
#![allow(non_uppercase_statics)]
use base::{CFAllocatorRef, CFRelease, CFRetain, CFTypeID, CFTypeRef};
use base::{TCFType, kCFAllocatorDefault};
use libc::c_void;
use std::mem;
pub type CFNumberType = u32;
// members of enum CFNumberType
// static kCFNumberSInt8Type: CFNumberType = 1;
// static kCFNumberSInt16Type: CFNumberType = 2;
// static kCFNumberSInt32Type: CFNumberType = 3;
static kCFNumberSInt64Type: CFNumberType = 4;
// static kCFNumberFloat32Type: CFNumberType = 5;
static kCFNumberFloat64Type: CFNumberType = 6;
// static kCFNumberCharType: CFNumberType = 7;
// static kCFNumberShortType: CFNumberType = 8;
// static kCFNumberIntType: CFNumberType = 9;
// static kCFNumberLongType: CFNumberType = 10;
// static kCFNumberLongLongType: CFNumberType = 11;
// static kCFNumberFloatType: CFNumberType = 12;
// static kCFNumberDoubleType: CFNumberType = 13;
// static kCFNumberCFIndexType: CFNumberType = 14;
// static kCFNumberNSIntegerType: CFNumberType = 15;
// static kCFNumberCGFloatType: CFNumberType = 16;
// static kCFNumberMaxType: CFNumberType = 16;
struct __CFNumber;
pub type CFNumberRef = *const __CFNumber;
/// An immutable numeric value.
///
/// FIXME(pcwalton): Should be a newtype struct, but that fails due to a Rust compiler bug.
pub struct CFNumber {
obj: CFNumberRef,
}
impl Drop for CFNumber {
fn drop(&mut self) {
unsafe {
CFRelease(self.as_CFTypeRef())
}
}
}
impl TCFType<CFNumberRef> for CFNumber {
#[inline]
fn as_concrete_TypeRef(&self) -> CFNumberRef {
self.obj
}
#[inline]
unsafe fn wrap_under_get_rule(reference: CFNumberRef) -> CFNumber {
let reference: CFNumberRef = mem::transmute(CFRetain(mem::transmute(reference)));
TCFType::wrap_under_create_rule(reference)
}
#[inline]
fn as_CFTypeRef(&self) -> CFTypeRef {
unsafe {
mem::transmute(self.as_concrete_TypeRef())
}
}
unsafe fn wrap_under_create_rule(obj: CFNumberRef) -> CFNumber {
CFNumber {
obj: obj,
}
}
#[inline]
fn type_id(_: Option<CFNumber>) -> CFTypeID {
unsafe {
CFNumberGetTypeID()
}
}
}
// TODO(pcwalton): Floating point.
impl ToPrimitive for CFNumber {
#[inline]
fn to_i64(&self) -> Option<i64> {
unsafe {
let mut value: i64 = 0;
let ok = CFNumberGetValue(self.obj, kCFNumberSInt64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
#[inline]
fn to_u64(&self) -> Option<u64> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn to_f64(&self) -> Option<f64> {
unsafe {
let mut value: f64 = 0.0;
let ok = CFNumberGetValue(self.obj, kCFNumberFloat64Type, mem::transmute(&mut value));
assert!(ok);
Some(value)
}
}
}
// TODO(pcwalton): Floating point.
impl FromPrimitive for CFNumber {
#[inline]
fn from_i64(value: i64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberSInt64Type,<|fim▁hole|>
#[inline]
fn from_u64(_: u64) -> Option<CFNumber> {
// CFNumber does not support unsigned 64-bit values.
None
}
#[inline]
fn from_f64(value: f64) -> Option<CFNumber> {
unsafe {
let number_ref = CFNumberCreate(kCFAllocatorDefault,
kCFNumberFloat64Type,
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
}
/// A convenience function to create CFNumbers.
pub fn number(value: i64) -> CFNumber {
FromPrimitive::from_i64(value).unwrap()
}
#[link(name = "CoreFoundation", kind = "framework")]
extern {
/*
* CFNumber.h
*/
fn CFNumberCreate(allocator: CFAllocatorRef, theType: CFNumberType, valuePtr: *const c_void)
-> CFNumberRef;
//fn CFNumberGetByteSize
fn CFNumberGetValue(number: CFNumberRef, theType: CFNumberType, valuePtr: *mut c_void) -> bool;
//fn CFNumberCompare
fn CFNumberGetTypeID() -> CFTypeID;
}<|fim▁end|>
|
mem::transmute(&value));
Some(TCFType::wrap_under_create_rule(number_ref))
}
}
|
<|file_name|>symlink.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright 2003 Dave Abrahams
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
# Test the 'symlink' rule.
import os
import BoostBuild
if os.name != 'posix':
print "The symlink tests can be run on posix only."
import sys
sys.exit(1)
t = BoostBuild.Tester()
t.write("jamroot.jam", "import gcc ;")<|fim▁hole|>symlink hello_debug : hello/<variant>debug ;
symlink links/hello_release : hello/<variant>release ;
""")
t.write("hello.cpp", """
int main() {}
""")
t.run_build_system()
t.expect_addition([
'hello_debug.exe',
'hello_release.exe',
'links/hello_release.exe'])
t.cleanup()<|fim▁end|>
|
t.write("jamfile.jam", """
exe hello : hello.cpp ;
symlink hello_release : hello/<variant>release ;
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Version code adopted from Django development version.
https://github.com/django/django
"""
VERSION = (0, 7, 2, 'final', 0)
def get_version(version=None):
"""
Returns a PEP 386-compliant version number from VERSION.
"""
if version is None:
from modeltranslation import VERSION as version
else:
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = '.dev%s' % git_changeset
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return str(main + sub)
def get_git_changeset():
"""<|fim▁hole|> This value isn't guaranteed to be unique, but collisions are very unlikely,
so it's sufficient for generating the development version numbers.
TODO: Check if we can rely on services like read-the-docs to pick this up.
"""
import datetime
import os
import subprocess
repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
git_log = subprocess.Popen(
'git log --pretty=format:%ct --quiet -1 HEAD', stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=True, cwd=repo_dir,
universal_newlines=True)
timestamp = git_log.communicate()[0]
try:
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except ValueError:
return None
return timestamp.strftime('%Y%m%d%H%M%S')<|fim▁end|>
|
Returns a numeric identifier of the latest git changeset.
The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Jinjing Wang
This file is part of mtcp.
mtcp is free software: you can redistribute it and/or modify<|fim▁hole|>the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
mtcp is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with mtcp. If not, see <http://www.gnu.org/licenses/>.
*/
pub mod tun_file_descriptor;
pub mod connect;
#[cfg(target_os = "linux")]
pub mod linux;
#[cfg(target_os = "android")]
pub mod android;
pub mod api;<|fim▁end|>
|
it under the terms of the GNU General Public License as published by
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
"""Implements a widget that displays the player's current commitments."""
|
<|file_name|>clean.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import re
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils import six
from ansible.module_utils._text import to_text
from ansible.module_utils.common._collections_compat import MutableMapping, MutableSequence
from ansible.plugins.loader import connection_loader
from ansible.utils.display import Display
display = Display()
def module_response_deepcopy(v):
"""Function to create a deep copy of module response data
Designed to be used within the Ansible "engine" to improve performance
issues where ``copy.deepcopy`` was used previously, largely with CPU
and memory contention.
This only supports the following data types, and was designed to only
handle specific workloads:
* ``dict``
* ``list``
The data we pass here will come from a serialization such
as JSON, so we shouldn't have need for other data types such as
``set`` or ``tuple``.
Take note that this function should not be used extensively as a
replacement for ``deepcopy`` due to the naive way in which this
handles other data types.
Do not expect uses outside of those listed below to maintain
backwards compatibility, in case we need to extend this function
to handle our specific needs:
* ``ansible.executor.task_result.TaskResult.clean_copy``
* ``ansible.vars.clean.clean_facts``
* ``ansible.vars.namespace_facts``
"""
if isinstance(v, dict):
ret = v.copy()
items = six.iteritems(ret)
elif isinstance(v, list):
ret = v[:]
items = enumerate(ret)
else:
return v
for key, value in items:
if isinstance(value, (dict, list)):
ret[key] = module_response_deepcopy(value)
else:
ret[key] = value
return ret
def strip_internal_keys(dirty, exceptions=None):
# All keys starting with _ansible_ are internal, so change the 'dirty' mapping and remove them.
if exceptions is None:
exceptions = tuple()
if isinstance(dirty, MutableSequence):
for element in dirty:
if isinstance(element, (MutableMapping, MutableSequence)):
strip_internal_keys(element, exceptions=exceptions)
elif isinstance(dirty, MutableMapping):<|fim▁hole|> if isinstance(k, six.string_types):
if k.startswith('_ansible_') and k not in exceptions:
del dirty[k]
continue
if isinstance(dirty[k], (MutableMapping, MutableSequence)):
strip_internal_keys(dirty[k], exceptions=exceptions)
else:
raise AnsibleError("Cannot strip invalid keys from %s" % type(dirty))
return dirty
def remove_internal_keys(data):
'''
More nuanced version of strip_internal_keys
'''
for key in list(data.keys()):
if (key.startswith('_ansible_') and key != '_ansible_parsed') or key in C.INTERNAL_RESULT_KEYS:
display.warning("Removed unexpected internal key in module return: %s = %s" % (key, data[key]))
del data[key]
# remove bad/empty internal keys
for key in ['warnings', 'deprecations']:
if key in data and not data[key]:
del data[key]
# cleanse fact values that are allowed from actions but not modules
for key in list(data.get('ansible_facts', {}).keys()):
if key.startswith('discovered_interpreter_') or key.startswith('ansible_discovered_interpreter_'):
del data['ansible_facts'][key]
def clean_facts(facts):
''' remove facts that can override internal keys or otherwise deemed unsafe '''
data = module_response_deepcopy(facts)
remove_keys = set()
fact_keys = set(data.keys())
# first we add all of our magic variable names to the set of
# keys we want to remove from facts
# NOTE: these will eventually disappear in favor of others below
for magic_var in C.MAGIC_VARIABLE_MAPPING:
remove_keys.update(fact_keys.intersection(C.MAGIC_VARIABLE_MAPPING[magic_var]))
# remove common connection vars
remove_keys.update(fact_keys.intersection(C.COMMON_CONNECTION_VARS))
# next we remove any connection plugin specific vars
for conn_path in connection_loader.all(path_only=True):
conn_name = os.path.splitext(os.path.basename(conn_path))[0]
re_key = re.compile('^ansible_%s_' % re.escape(conn_name))
for fact_key in fact_keys:
# most lightweight VM or container tech creates devices with this pattern, this avoids filtering them out
if (re_key.match(fact_key) and not fact_key.endswith(('_bridge', '_gwbridge'))) or fact_key.startswith('ansible_become_'):
remove_keys.add(fact_key)
# remove some KNOWN keys
for hard in C.RESTRICTED_RESULT_KEYS + C.INTERNAL_RESULT_KEYS:
if hard in fact_keys:
remove_keys.add(hard)
# finally, we search for interpreter keys to remove
re_interp = re.compile('^ansible_.*_interpreter$')
for fact_key in fact_keys:
if re_interp.match(fact_key):
remove_keys.add(fact_key)
# then we remove them (except for ssh host keys)
for r_key in remove_keys:
if not r_key.startswith('ansible_ssh_host_key_'):
display.warning("Removed restricted key from module data: %s" % (r_key))
del data[r_key]
return strip_internal_keys(data)
def namespace_facts(facts):
''' return all facts inside 'ansible_facts' w/o an ansible_ prefix '''
deprefixed = {}
for k in facts:
if k.startswith('ansible_') and k not in ('ansible_local',):
deprefixed[k[8:]] = module_response_deepcopy(facts[k])
else:
deprefixed[k] = module_response_deepcopy(facts[k])
return {'ansible_facts': deprefixed}<|fim▁end|>
|
# listify to avoid updating dict while iterating over it
for k in list(dirty.keys()):
|
<|file_name|>test_form.py<|end_file_name|><|fim▁begin|>from constance.admin import ConstanceForm
from django.forms import fields
from django.test import TestCase
class TestForm(TestCase):
def test_form_field_types(self):
f = ConstanceForm({})
self.assertIsInstance(f.fields['INT_VALUE'], fields.IntegerField)
self.assertIsInstance(f.fields['BOOL_VALUE'], fields.BooleanField)
self.assertIsInstance(f.fields['STRING_VALUE'], fields.CharField)
self.assertIsInstance(f.fields['DECIMAL_VALUE'], fields.DecimalField)
self.assertIsInstance(f.fields['DATETIME_VALUE'], fields.SplitDateTimeField)
self.assertIsInstance(f.fields['TIMEDELTA_VALUE'], fields.DurationField)
self.assertIsInstance(f.fields['FLOAT_VALUE'], fields.FloatField)
self.assertIsInstance(f.fields['DATE_VALUE'], fields.DateField)
self.assertIsInstance(f.fields['TIME_VALUE'], fields.TimeField)
<|fim▁hole|> self.assertIsInstance(f.fields['EMAIL_VALUE'], fields.EmailField)<|fim▁end|>
|
# from CONSTANCE_ADDITIONAL_FIELDS
self.assertIsInstance(f.fields['CHOICE_VALUE'], fields.ChoiceField)
|
<|file_name|>e424d03ba260_.py<|end_file_name|><|fim▁begin|>"""empty message
Revision ID: e424d03ba260
Revises: ace8d095a26b
Create Date: 2017-10-12 11:25:11.775853
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e424d03ba260'
down_revision = 'ace8d095a26b'
branch_labels = None
depends_on = None<|fim▁hole|>def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('gist', sa.Column('lang', sa.String(length=30), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('gist', 'lang')
# ### end Alembic commands ###<|fim▁end|>
| |
<|file_name|>issue-19982.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(core,unboxed_closures)]
#[allow(dead_code)]
struct Foo;
impl<'a> Fn<(&'a (),)> for Foo {
type Output = ();
extern "rust-call" fn call(&self, (_,): (&(),)) {}<|fim▁hole|><|fim▁end|>
|
}
fn main() {}
|
<|file_name|>hostname.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package format
import (
"github.com/trivago/gollum/core"
"os"
)
// Hostname formatter
//
// This formatter prefixes the message or metadata with the hostname of
// the machine gollum is running on.
//
// Parameters
//
// - Separator: Defines the separator string placed between hostname and data.
// By default this parameter is set to ":".
//
// Examples
//
// This example inserts the hostname into an existing JSON payload.
//
// exampleProducer:
// Type: producer.Console
// Streams: "*"
// Modulators:
// - format.Trim:
// LeftSeparator: "{"
// RightSeparator: "}"
// - format.Hostname
// Separator: ","
// - format.Envelope:
// Prefix: "{\"host\":"
// Postfix: "}"
type Hostname struct {
core.SimpleFormatter `gollumdoc:"embed_type"`
separator []byte `config:"Separator" default:":"`
}
func init() {
core.TypeRegistry.Register(Hostname{})
}
// Configure initializes this formatter with values from a plugin config.
func (format *Hostname) Configure(conf core.PluginConfigReader) {
}
// ApplyFormatter update message payload
func (format *Hostname) ApplyFormatter(msg *core.Message) error {
content := format.getFinalContent(format.GetAppliedContent(msg))
format.SetAppliedContent(msg, content)
return nil
}
func (format *Hostname) getFinalContent(content []byte) []byte {
hostname, err := os.Hostname()
if err != nil {
format.Logger.Error(err)
hostname = "unknown host"
}
dataSize := len(hostname) + len(format.separator) + len(content)
payload := core.MessageDataPool.Get(dataSize)
offset := copy(payload, []byte(hostname))
offset += copy(payload[offset:], format.separator)
copy(payload[offset:], content)
return payload
}<|fim▁end|>
|
// Copyright 2015-2017 trivago GmbH
//
// Licensed under the Apache License, Version 2.0 (the "License");
|
<|file_name|>duplicated-external-mods.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//<|fim▁hole|>// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-fast
// aux-build:anon-extern-mod-cross-crate-1.rs
// aux-build:anon-extern-mod-cross-crate-1.rs
extern crate anonexternmod;
pub fn main() { }<|fim▁end|>
| |
<|file_name|>lsm6ds33.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""Python library module for LSM6DS33 accelerometer and gyroscope.
This module for the Raspberry Pi computer helps interface the LSM6DS33
accelerometer and gyro.The library makes it easy to read
the raw accelerometer and gyro data through I²C interface and it also provides
methods for getting angular velocity and g forces.
The datasheet for the LSM6DS33 is available at
[https://www.pololu.com/file/download/LSM6DS33.pdf?file_id=0J1087]
"""
import math
from i2c import I2C
from time import sleep
from constants import *
class LSM6DS33(I2C):
""" Set up and access LSM6DS33 accelerometer and gyroscope.
"""
# Output registers used by the gyroscope
gyro_registers = [
LSM6DS33_OUTX_L_G, # low byte of X value
LSM6DS33_OUTX_H_G, # high byte of X value
LSM6DS33_OUTY_L_G, # low byte of Y value
LSM6DS33_OUTY_H_G, # high byte of Y value
LSM6DS33_OUTZ_L_G, # low byte of Z value
LSM6DS33_OUTZ_H_G, # high byte of Z value
]
# Output registers used by the accelerometer
accel_registers = [
LSM6DS33_OUTX_L_XL, # low byte of X value
LSM6DS33_OUTX_H_XL, # high byte of X value
LSM6DS33_OUTY_L_XL, # low byte of Y value
LSM6DS33_OUTY_H_XL, # high byte of Y value
LSM6DS33_OUTZ_L_XL, # low byte of Z value
LSM6DS33_OUTZ_H_XL, # high byte of Z value
]
def __init__(self, bus_id=1):
""" Set up I2C connection and initialize some flags and values.
"""
super(LSM6DS33, self).__init__(bus_id)
self.is_accel_enabled = False
self.is_gyro_enabled = False
self.is_gyro_calibrated = False
self.gyro_cal = [0, 0, 0]
self.is_accel_calibrated = False
self.accel_angle_cal = [0, 0]
def __del__(self):
""" Clean up."""
try:
# Power down accelerometer and gyro
self.writeRegister(LSM6DS33_ADDR, LSM6DS33_CTRL1_XL, 0x00)
self.writeRegister(LSM6DS33_ADDR, LSM6DS33_CTRL2_G, 0x00)
super(LSM6DS33, self).__del__()
print('Destroying')
except:
pass
def enable(self, accelerometer=True, gyroscope=True, calibration=True):
""" Enable and set up the given sensors in the IMU."""
if accelerometer:
# 1.66 kHz (high performance) / +/- 4g
# binary value -> 0b01011000, hex value -> 0x58
self.write_register(LSM6DS33_ADDR, LSM6DS33_CTRL1_XL, 0x58)
self.is_accel_enabled = True
if gyroscope:
# 208 Hz (high performance) / 1000 dps
# binary value -> 0b01011000, hex value -> 0x58
self.write_register(LSM6DS33_ADDR, LSM6DS33_CTRL2_G, 0x58)
self.is_gyro_enabled = True
if calibration:
self.calibrate()
self.is_gyro_calibrated = True
self.is_accel_calibrated = True
def calibrate(self, iterations=2000):
""" Calibrate the gyro's raw values."""
print('Calibrating Gryo and Accelerometer...')
for i in range(iterations):
gyro_raw = self.get_gyroscope_raw()
accel_angles = self.get_accelerometer_angles()
self.gyro_cal[0] += gyro_raw[0]
self.gyro_cal[1] += gyro_raw[1]
self.gyro_cal[2] += gyro_raw[2]
self.accel_angle_cal[0] += accel_angles[0]
self.accel_angle_cal[1] += accel_angles[1]
sleep(0.004)
self.gyro_cal[0] /= iterations
self.gyro_cal[1] /= iterations
self.gyro_cal[2] /= iterations
self.accel_angle_cal[0] /= iterations
self.accel_angle_cal[1] /= iterations
print('Calibration Done')
def get_gyroscope_raw(self):
""" Return a 3D vector of raw gyro data.
"""
# Check if gyroscope has been enabled
if not self.is_gyro_enabled:
raise(Exception('Gyroscope is not enabled!'))
sensor_data = self.read_3d_sensor(LSM6DS33_ADDR, self.gyro_registers)
# Return the vector
if self.is_gyro_calibrated:
calibrated_gyro_data = sensor_data
calibrated_gyro_data[0] -= self.gyro_cal[0]
calibrated_gyro_data[1] -= self.gyro_cal[1]
calibrated_gyro_data[2] -= self.gyro_cal[2]
return calibrated_gyro_data
else:
return sensor_data
def get_gyro_angular_velocity(self):
""" Return a 3D vector of the angular velocity measured by the gyro
in degrees/second.
"""
# Check if gyroscope has been enabled
if not self.is_gyro_enabled:
raise(Exception('Gyroscope is not enabled!'))
# Check if gyroscope has been calibrated
if not self.is_gyro_calibrated:
raise(Exception('Gyroscope is not calibrated!'))
gyro_data = self.get_gyroscope_raw()
gyro_data[0] = (gyro_data[0] * GYRO_GAIN) / 1000
gyro_data[1] = (gyro_data[1] * GYRO_GAIN) / 1000
gyro_data[2] = (gyro_data[2] * GYRO_GAIN) / 1000
return gyro_data
def get_accelerometer_raw(self):
""" Return a 3D vector of raw accelerometer data.
"""
<|fim▁hole|> raise(Exception('Accelerometer is not enabled!'))
return self.read_3d_sensor(LSM6DS33_ADDR, self.accel_registers)
def get_accelerometer_g_forces(self):
""" Return a 3D vector of the g forces measured by the accelerometer"""
[x_val, y_val, z_val] = self.get_accelerometer_raw()
x_val = (x_val * ACCEL_CONVERSION_FACTOR) / 1000
y_val = (y_val * ACCEL_CONVERSION_FACTOR) / 1000
z_val = (z_val * ACCEL_CONVERSION_FACTOR) / 1000
return [x_val, y_val, z_val]
def get_accelerometer_angles(self, round_digits=0):
""" Return a 2D vector of roll and pitch angles,
based on accelerometer g forces
"""
# Get raw accelerometer g forces
[acc_xg_force, acc_yg_force, acc_zg_force] = self.get_accelerometer_g_forces()
# Calculate angles
xz_dist = self._get_dist(acc_xg_force, acc_zg_force)
yz_dist = self._get_dist(acc_yg_force, acc_zg_force)
accel_roll_angle = math.degrees(math.atan2(acc_yg_force, xz_dist))
accel_pitch_angle = -math.degrees(math.atan2(acc_xg_force, yz_dist))
if self.is_accel_calibrated:
accel_roll_angle -= self.accel_angle_cal[0]
accel_pitch_angle -= self.accel_angle_cal[1]
if round_digits != 0:
return [round(accel_roll_angle, round_digits), round(accel_pitch_angle, round_digits)]
else:
return [accel_roll_angle, accel_pitch_angle]
else:
return [accel_roll_angle, accel_pitch_angle]
def _get_dist(self, a, b):
return math.sqrt((a * a) + (b * b))<|fim▁end|>
|
# Check if accelerometer has been enabled
if not self.is_accel_enabled:
|
<|file_name|>Insanity.js<|end_file_name|><|fim▁begin|>import Analyzer from 'parser/core/Analyzer';
import RESOURCE_TYPES from 'game/RESOURCE_TYPES';
class Insanity extends Analyzer {
_insanityEvents = [];
on_toPlayer_energize(event) {
if (event.resourceChangeType === RESOURCE_TYPES.INSANITY.id) {
this._insanityEvents = [
...this._insanityEvents,
event,
];
}<|fim▁hole|> }
}
export default Insanity;<|fim▁end|>
|
}
get events() {
return this._insanityEvents;
|
<|file_name|>a30_entityAction.js<|end_file_name|><|fim▁begin|>/**
@module breeze
**/
var EntityAction = (function () {
/**
EntityAction is an 'Enum' containing all of the valid actions that can occur to an 'Entity'.
@class EntityAction
@static
**/
var entityActionMethods = {
isAttach: function () {
return !!this.isAttach;
},
isDetach: function () {
return !!this.isDetach;
},
isModification: function () {
return !!this.isModification;
}
};
var EntityAction = new Enum("EntityAction", entityActionMethods);
/**
Attach - Entity was attached via an AttachEntity call.
@property Attach {EntityAction}
@final
@static
**/
EntityAction.Attach = EntityAction.addSymbol({ isAttach: true});
/**
AttachOnQuery - Entity was attached as a result of a query.
@property AttachOnQuery {EntityAction}
@final
@static
**/
EntityAction.AttachOnQuery = EntityAction.addSymbol({ isAttach: true});
/**
AttachOnImport - Entity was attached as a result of an import.
@property AttachOnImport {EntityAction}
@final
@static
**/
EntityAction.AttachOnImport = EntityAction.addSymbol({ isAttach: true});
/**
Detach - Entity was detached.
@property Detach {EntityAction}
@final
@static
**/
EntityAction.Detach = EntityAction.addSymbol({ isDetach: true });
/**
MergeOnQuery - Properties on the entity were merged as a result of a query.
@property MergeOnQuery {EntityAction}<|fim▁hole|> @final
@static
**/
EntityAction.MergeOnQuery = EntityAction.addSymbol({ isModification: true });
/**
MergeOnImport - Properties on the entity were merged as a result of an import.
@property MergeOnImport {EntityAction}
@final
@static
**/
EntityAction.MergeOnImport = EntityAction.addSymbol({ isModification: true });
/**
MergeOnSave - Properties on the entity were merged as a result of a save
@property MergeOnSave {EntityAction}
@final
@static
**/
EntityAction.MergeOnSave = EntityAction.addSymbol({ isModification: true });
/**
PropertyChange - A property on the entity was changed.
@property PropertyChange {EntityAction}
@final
@static
**/
EntityAction.PropertyChange = EntityAction.addSymbol({ isModification: true});
/**
EntityStateChange - The EntityState of the entity was changed.
@property EntityStateChange {EntityAction}
@final
@static
**/
EntityAction.EntityStateChange = EntityAction.addSymbol();
/**
AcceptChanges - AcceptChanges was called on the entity, or its entityState was set to Unmodified.
@property AcceptChanges {EntityAction}
@final
@static
**/
EntityAction.AcceptChanges = EntityAction.addSymbol();
/**
RejectChanges - RejectChanges was called on the entity.
@property RejectChanges {EntityAction}
@final
@static
**/
EntityAction.RejectChanges = EntityAction.addSymbol({ isModification: true});
/**
Clear - The EntityManager was cleared. All entities detached.
@property Clear {EntityAction}
@final
@static
**/
EntityAction.Clear = EntityAction.addSymbol({ isDetach: true});
EntityAction.resolveSymbols();
return EntityAction;
})();
breeze.EntityAction = EntityAction;<|fim▁end|>
| |
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#![cfg(target_os = "ios")]
use GlAttributes;
use CreationError;
use PixelFormat;
use PixelFormatRequirements;
use ContextError;
pub use api::ios::*;
#[derive(Default)]
pub struct PlatformSpecificHeadlessBuilderAttributes;
pub struct HeadlessContext(i32);
impl HeadlessContext {
/// See the docs in the crate root file.
pub fn new(_: (u32, u32), _: &PixelFormatRequirements, _: &GlAttributes<&HeadlessContext>,
_: &PlatformSpecificHeadlessBuilderAttributes)
-> Result<HeadlessContext, CreationError>
{
unimplemented!()
}
/// See the docs in the crate root file.
pub unsafe fn make_current(&self) -> Result<(), ContextError> {
unimplemented!()
}
pub fn swap_buffers(&self) -> Result<(), ContextError> {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn is_current(&self) -> bool {
unimplemented!()
}
/// See the docs in the crate root file.
pub fn get_proc_address(&self, _addr: &str) -> *const () {<|fim▁hole|> }
pub fn get_api(&self) -> ::Api {
::Api::OpenGlEs
}
pub fn get_pixel_format(&self) -> PixelFormat {
unimplemented!();
}
}
unsafe impl Send for HeadlessContext {}
unsafe impl Sync for HeadlessContext {}<|fim▁end|>
|
unimplemented!()
|
<|file_name|>environment.ts<|end_file_name|><|fim▁begin|>// The file contents for the current environment will overwrite these during build.
// The build system defaults to the dev environment which uses `environment.ts`, but if you do
// `ng build --env=prod` then `environment.prod.ts` will be used instead.
// The list of which env maps to which file can be found in `.angular-cli.json`.
export const environment = {
production: false,<|fim▁hole|><|fim▁end|>
|
functionEndPoint: 'http://localhost:7071/api/SessionAttendeePhotos/api/SessionAttendeePhotos'
};
|
<|file_name|>messages.py<|end_file_name|><|fim▁begin|>import string
import socket
import base64
import sys
class message:
def __init__(self, name="generate" ):
if name == "generate":
self.name=socket.gethostname()
else:
self.name=name
<|fim▁hole|>
def set ( self, content=" " ):
base64content = base64.b64encode ( content )
self.decoded="piratebox;"+ self.type + ";01;" + self.name + ";" + base64content
def get ( self ):
# TODO Split decoded part
message_parts = string.split ( self.decoded , ";" )
if message_parts[0] != "piratebox":
return None
b64_content_part = message_parts[4]
content = base64.b64decode ( b64_content_part )
return content
def get_sendername (self):
return self.name
def get_message ( self ):
return self.decoded
def set_message ( self , decoded):
self.decoded = decoded
class shoutbox_message(message):
def __init__(self, name="generate" ):
message.__init__( self , name)
self.type="sb"<|fim▁end|>
|
self.type="gc"
self.decoded=""
|
<|file_name|>tchannel_client.go<|end_file_name|><|fim▁begin|>// Copyright (c) 2022 Uber Technologies, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
package zanzibar
import (
"context"<|fim▁hole|>
"github.com/pkg/errors"
"github.com/uber-go/tally"
"github.com/uber/tchannel-go"
"github.com/uber/zanzibar/runtime/ruleengine"
netContext "golang.org/x/net/context"
)
const (
logFieldClientID = "clientID"
// thrift service::method of client thrift spec
logFieldClientThriftMethod = "clientThriftMethod"
// the backend service corresponding to the client
logFieldClientService = "clientService"
// the method name for a particular client method call
logFieldClientMethod = "clientMethod"
)
// TChannelClientOption is used when creating a new TChannelClient
type TChannelClientOption struct {
	// ServiceName is the name of the backend tchannel service this client
	// talks to; it selects the default subchannel.
	ServiceName string
	// ClientID identifies this client in logs and metrics scope tags.
	ClientID string
	// Timeout is the overall deadline used to build the tchannel context
	// for a call (across retry attempts).
	Timeout time.Duration
	// TimeoutPerAttempt bounds each individual retry attempt.
	TimeoutPerAttempt time.Duration
	// RoutingKey, when non-nil, is set on the tchannel context builder for
	// every call.
	RoutingKey *string

	// MethodNames is a map from "ThriftService::method" to "ZanzibarMethodName",
	// where ThriftService and method are from the service's Thrift IDL, and
	// ZanzibarMethodName is the public method name exposed on the Zanzibar-generated
	// client, from the zanzibar configuration. For example, if a client named FooClient
	// has a methodMap of map[string]string{"Foo::bar":"Bar"}, then one can do
	// `FooClient.Bar()` to issue a RPC to Thrift service `Foo`'s `bar` method.
	MethodNames map[string]string

	// RuleEngine dynamically determines which alternate channel to call based
	// on matching request headers, else the client falls back to default routing.
	RuleEngine ruleengine.RuleEngine

	// HeaderPatterns is the list of header keys which are looked up when
	// matching a request against RuleEngine.
	HeaderPatterns []string

	// RequestUUIDHeaderKey is the header key that is used together with the
	// request uuid on context to form a header when sending the request to
	// downstream, e.g. "x-request-uuid".
	RequestUUIDHeaderKey string

	// AltChannelMap is a map for dynamic lookup of alternative channels,
	// keyed by the service name a rule-engine match resolves to.
	AltChannelMap map[string]*tchannel.SubChannel

	// MaxAttempts is the maximum retry count for a client.
	MaxAttempts int
}
type TChannelClient struct {
ClientID string
ContextLogger ContextLogger
ch *tchannel.Channel
sc *tchannel.SubChannel
scAlt *tchannel.SubChannel
serviceName string
methodNames map[string]string
timeout time.Duration
timeoutPerAttempt time.Duration
routingKey *string
shardKey *string
metrics ContextMetrics
contextExtractor ContextExtractor
requestUUIDHeaderKey string
ruleEngine ruleengine.RuleEngine
headerPatterns []string
altChannelMap map[string]*tchannel.SubChannel
maxAttempts int
}
// NewTChannelClient is deprecated, use NewTChannelClientContext instead
func NewTChannelClient(
	ch *tchannel.Channel,
	contextLogger ContextLogger,
	scope tally.Scope,
	contextExtractor ContextExtractor,
	opt *TChannelClientOption,
) *TChannelClient {
	// Adapt the tally scope into context-aware metrics, then delegate to the
	// context-based constructor.
	metrics := NewContextMetrics(scope)
	return NewTChannelClientContext(ch, contextLogger, metrics, contextExtractor, opt)
}
// NewTChannelClientContext returns a TChannelClient that makes calls over the given tchannel to the given thrift service.
func NewTChannelClientContext(
	ch *tchannel.Channel,
	contextLogger ContextLogger,
	metrics ContextMetrics,
	contextExtractor ContextExtractor,
	opt *TChannelClientOption,
) *TChannelClient {
	// The default subchannel for the configured service is resolved once up
	// front; per-request routing may still pick an alternate channel later
	// (see getDynamicChannelWithFallback).
	return &TChannelClient{
		ch:                   ch,
		sc:                   ch.GetSubChannel(opt.ServiceName),
		serviceName:          opt.ServiceName,
		ClientID:             opt.ClientID,
		methodNames:          opt.MethodNames,
		timeout:              opt.Timeout,
		timeoutPerAttempt:    opt.TimeoutPerAttempt,
		routingKey:           opt.RoutingKey,
		ContextLogger:        contextLogger,
		metrics:              metrics,
		contextExtractor:     contextExtractor,
		requestUUIDHeaderKey: opt.RequestUUIDHeaderKey,
		ruleEngine:           opt.RuleEngine,
		headerPatterns:       opt.HeaderPatterns,
		altChannelMap:        opt.AltChannelMap,
		maxAttempts:          opt.MaxAttempts,
	}
}
// Call makes a RPC call to the given service.
func (c *TChannelClient) Call(
	ctx context.Context,
	thriftService, methodName string,
	reqHeaders map[string]string,
	req, resp RWTStruct,
) (success bool, resHeaders map[string]string, err error) {
	// "Service::method" is used both as a metrics tag and as the key into
	// the methodNames lookup table.
	serviceMethod := thriftService + "::" + methodName

	ctx = WithScopeTags(ctx, map[string]string{
		scopeTagClient:          c.ClientID,
		scopeTagClientMethod:    methodName,
		scopeTagsTargetService:  c.serviceName,
		scopeTagsTargetEndpoint: serviceMethod,
	})

	outboundCall := &tchannelOutboundCall{
		client:        c,
		methodName:    c.methodNames[serviceMethod],
		serviceMethod: serviceMethod,
		reqHeaders:    reqHeaders,
		contextLogger: c.ContextLogger,
		metrics:       c.metrics,
	}
	return c.call(ctx, outboundCall, reqHeaders, req, resp)
}
// call executes the prepared outbound call: it propagates the request UUID,
// builds a tchannel context with timeout/retry/routing options, runs the call
// with retries over the (possibly dynamically selected) subchannel, and wraps
// non-system errors with call context.  The observed call duration is always
// exposed to the caller via the response headers.
func (c *TChannelClient) call(
	ctx context.Context,
	call *tchannelOutboundCall,
	reqHeaders map[string]string,
	req, resp RWTStruct,
) (success bool, resHeaders map[string]string, err error) {
	defer func() {
		call.finish(ctx, err)
		if call.resHeaders == nil {
			call.resHeaders = make(map[string]string)
		}
		call.resHeaders[ClientResponseDurationKey] = call.duration.String()
	}()
	call.start()

	// Forward the request UUID (if present on the context) downstream under
	// the configured header key.
	reqUUID := RequestUUIDFromCtx(ctx)
	if reqUUID != "" {
		if reqHeaders == nil {
			reqHeaders = make(map[string]string)
		}
		reqHeaders[c.requestUUIDHeaderKey] = reqUUID
	}

	// Start passing the MaxAttempt field which will be used while creating the RetryOptions.
	// Note : No impact on the existing clients because MaxAttempt will be passed as 0 and it will default to 5 while retrying the execution.
	// More details can be found at https://t3.uberinternal.com/browse/EDGE-8526
	retryOpts := tchannel.RetryOptions{
		TimeoutPerAttempt: c.timeoutPerAttempt,
		MaxAttempts:       c.maxAttempts,
	}
	ctxBuilder := tchannel.NewContextBuilder(c.timeout).
		SetParentContext(ctx).
		SetRetryOptions(&retryOpts)
	if c.routingKey != nil {
		ctxBuilder.SetRoutingKey(*c.routingKey)
	}
	rd := GetRoutingDelegateFromCtx(ctx)
	if rd != "" {
		ctxBuilder.SetRoutingDelegate(rd)
	}

	sk := GetShardKeyFromCtx(ctx)
	if sk != "" {
		ctxBuilder.SetShardKey(sk)
	}

	ctx, cancel := ctxBuilder.Build()
	defer cancel()

	err = c.ch.RunWithRetry(ctx, func(ctx netContext.Context, rs *tchannel.RequestState) (cerr error) {
		call.resHeaders = map[string]string{}
		call.success = false

		sc, ctx := c.getDynamicChannelWithFallback(reqHeaders, c.sc, ctx)
		call.call, cerr = sc.BeginCall(ctx, call.serviceMethod, &tchannel.CallOptions{
			Format:          tchannel.Thrift,
			ShardKey:        GetShardKeyFromCtx(ctx),
			RequestState:    rs,
			RoutingDelegate: GetRoutingDelegateFromCtx(ctx),
		})
		if cerr != nil {
			// BUGFIX: wrap cerr (the BeginCall error), not the outer named
			// return value err, which is still nil at this point.
			// errors.Wrapf(nil, ...) returns nil, so the original code
			// silently swallowed BeginCall failures.
			return errors.Wrapf(
				cerr, "Could not begin outbound %s.%s (%s %s) request",
				call.client.ClientID, call.methodName, call.client.serviceName, call.serviceMethod,
			)
		}

		// trace request
		reqHeaders = tchannel.InjectOutboundSpan(call.call.Response(), reqHeaders)

		if cerr := call.writeReqHeaders(reqHeaders); cerr != nil {
			return cerr
		}
		if cerr := call.writeReqBody(ctx, req); cerr != nil {
			return cerr
		}

		response := call.call.Response()
		if cerr = call.readResHeaders(response); cerr != nil {
			return cerr
		}
		if cerr = call.readResBody(ctx, response, resp); cerr != nil {
			return cerr
		}

		return cerr
	})

	if err != nil {
		// Do not wrap system errors.
		if _, ok := err.(tchannel.SystemError); ok {
			return call.success, call.resHeaders, err
		}
		return call.success, nil, errors.Wrapf(
			err, "Could not make outbound %s.%s (%s %s) response",
			call.client.ClientID, call.methodName, call.client.serviceName, call.serviceMethod,
		)
	}

	return call.success, call.resHeaders, err
}
// first rule match, would be the chosen channel. if nothing matches fallback to default channel
func (c *TChannelClient) getDynamicChannelWithFallback(reqHeaders map[string]string,
sc *tchannel.SubChannel, ctx netContext.Context) (*tchannel.SubChannel, netContext.Context) {
ch := sc
if c.ruleEngine == nil {
return ch, ctx
}
for _, headerPattern := range c.headerPatterns {
// this header is not present, so can't match a rule
headerPatternVal, ok := reqHeaders[headerPattern]
if !ok {
continue
}
val, match := c.ruleEngine.GetValue(headerPattern, strings.ToLower(headerPatternVal))
// if rule doesn't match, continue with a next input
if !match {
continue
}
serviceDetails := val.([]string)
// we know service has a channel, as this was constructed in c'tor
ch = c.altChannelMap[serviceDetails[0]]
if len(serviceDetails) > 1 {
ctx = WithRoutingDelegate(ctx, serviceDetails[1])
}
return ch, ctx
}
// if nothing matches return the default channel/**/
return ch, ctx
}<|fim▁end|>
|
"strings"
"time"
|
<|file_name|>entity-view.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, ViewChild } from '@angular/core';
import { Router, ROUTER_DIRECTIVES } from '@angular/router';
import { ActivatedRoute } from '@angular/router';
import {CORE_DIRECTIVES} from '@angular/common';
import {TAB_DIRECTIVES} from 'ng2-bootstrap/ng2-bootstrap';
import { EntitySummaryComponent } from './entity-summary.component';
import { SectionComponent, GridComponent } from '../../shared/index';
import { EntityService, ContractsService } from '../../services/index';
@Component({
moduleId: module.id,
templateUrl: 'entity-view.component.html',<|fim▁hole|> ROUTER_DIRECTIVES, TAB_DIRECTIVES, SectionComponent, EntitySummaryComponent, GridComponent
],
viewProviders: [EntityService, ContractsService],
})
export class EntityViewComponent implements OnInit {
entity: Object = {};
contracts: Object[];
constructor(private route: ActivatedRoute, private entityService: EntityService, private contractsService: ContractsService) { }
ngOnInit() {
this.route.params
.map(params => params['id'])
.subscribe((id) => {
this.entityService
.getOneById(id)
.subscribe(entity => this.entity = entity);
});
this.loadContracts();
}
ngAfterViewInit(): void {
}
loadContracts(): void {
this.contractsService.getAll().subscribe((contracts: Object[]) => {
this.contracts = contracts;
});
}
config = {
"columns": [{ "name": "name", "header": "Name" },
{ "name": "description", "header": "Description" },
{ "name": "counterparty", "header": "Counterparty" },
{ "name": "startDate", "header": "Start Date" }]
}
}<|fim▁end|>
|
styleUrls: ['entity-view.component.css'],
directives: [
|
<|file_name|>test_06_ingest.py<|end_file_name|><|fim▁begin|>"""Test icatdump and icatingest.
"""
from subprocess import CalledProcessError
import pytest
import icat
import icat.config
from icat.query import Query
from conftest import DummyDatafile, gettestdata, getConfig, callscript
# Test input
ds_params = str(gettestdata("ingest-ds-params.xml"))
datafiles = str(gettestdata("ingest-datafiles.xml"))
@pytest.fixture(scope="module")
def client(setupicat):
client, conf = getConfig(confSection="acord", ids="mandatory")
client.login(conf.auth, conf.credentials)
return client
@pytest.fixture(scope="module")
def cmdargs(setupicat):
_, conf = getConfig(confSection="acord", ids="mandatory")
return conf.cmdargs + ["-f", "XML"]
@pytest.fixture(scope="function")
def dataset(client):
"""A dataset to be used in the test.
The dataset is not created by the fixture, it is assumed that the
test does it. The dataset will be eventually be deleted after the
test.
"""
inv = client.assertedSearch("Investigation [name='10100601-ST']")[0]
dstype = client.assertedSearch("DatasetType [name='raw']")[0]
dataset = client.new("dataset",
name="e208343", complete=False,
investigation=inv, type=dstype)
yield dataset
try:
ds = client.searchMatching(dataset)
dataset.id = ds.id
except icat.SearchResultError:
# Dataset not found, maybe the test failed, nothing to
# clean up then.
pass
else:
# If any datafile has been uploaded (i.e. the location is
# not NULL), need to delete it from IDS first. Any other
# datafile or dataset parameter will be deleted
# automatically with the dataset by cascading in the ICAT
# server.
query = Query(client, "Datafile",
conditions={"dataset.id": "= %d" % dataset.id,
"location": "IS NOT NULL"})
client.deleteData(client.search(query))
client.delete(dataset)
# Test datafiles to be created by test_ingest_datafiles:
testdatafiles = [
{
'dfname': "e208343.dat",
'size': 394,
'mtime': 1286600400,
},
{
'dfname': "e208343.nxs",
'size': 52857,
'mtime': 1286600400,
},
]
def verify_dataset_params(client, dataset, params):
    """Assert that dataset has exactly the parameters given in params.

    params is a set of (parameter type name, numeric value, units)
    triples.
    """
    query = Query(client, "DatasetParameter",
                  conditions={"dataset.id": "= %d" % dataset.id},
                  includes={"type"})
    found = client.search(query)
    assert len(found) == len(params)
    assert {(p.type.name, p.numericValue, p.type.units) for p in found} == params
def test_ingest_dataset_params(client, dataset, cmdargs):
"""Ingest a file setting some dataset parameters.
"""
dataset.create()
args = cmdargs + ["-i", ds_params]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_throw(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now place a duplicate object in the way.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params]
# FIXME: should inspect stderr and verify ICATObjectExistsError.
with pytest.raises(CalledProcessError) as err:
callscript("icatingest.py", args)
# Verify that the params have been set. The exceptions should
# have been raised while trying to ingest the second parameter.
# The first one (Magnetic field) should have been created and
# Reactor power should still have the value set above.
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW")
})
def test_ingest_duplicate_ignore(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now ignore the duplicate.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "IGNORE"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_check_err(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but use CHECK which fails due to mismatch.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "CHECK"]
# FIXME: should inspect stderr and verify ICATObjectExistsError.
with pytest.raises(CalledProcessError) as err:
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 5.0, "MW")
})
def test_ingest_duplicate_check_ok(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now it matches, so CHECK should return ok.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=10.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "CHECK"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
def test_ingest_duplicate_overwrite(client, dataset, cmdargs):
"""Ingest with a collision of a duplicate object.
Same test as above, but now overwrite the old value.
"""
dataset.create()
ptype = client.assertedSearch("ParameterType [name='Reactor power']")[0]
p = client.new("datasetParameter", numericValue=5.0,
dataset=dataset, type=ptype)
p.create()
args = cmdargs + ["-i", ds_params, "--duplicate", "OVERWRITE"]
callscript("icatingest.py", args)
verify_dataset_params(client, dataset, {
("Magnetic field", 5.3, "T"),
("Reactor power", 10.0, "MW"),
("Sample temperature", 293.15, "K")
})
# Minimal example, a Datafile featuring a string.
ingest_data_string = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<name>dup_test_str.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata>
"""
# A Datafile featuring an int.
ingest_data_int = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<fileSize>42</fileSize>
<name>dup_test_int.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata><|fim▁hole|>"""
# A Dataset featuring a boolean.
ingest_data_boolean = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<dataset id="Dataset_001">
<complete>false</complete>
<name>e208343</name>
<investigation name="10100601-ST" visitId="1.1-N"/>
<type name="raw"/>
</dataset>
</data>
</icatdata>
"""
# A DatasetParameter featuring a float.
ingest_data_float = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datasetParameter>
<numericValue>5.3</numericValue>
<dataset ref="Dataset_001"/>
<type name="Magnetic field" units="T"/>
</datasetParameter>
</data>
</icatdata>
"""
# A Datafile featuring a date.
ingest_data_date = """<?xml version="1.0" encoding="utf-8"?>
<icatdata>
<data>
<datasetRef id="Dataset_001"
name="e208343"
investigation.name="10100601-ST"
investigation.visitId="1.1-N"/>
<datafile>
<datafileCreateTime>2008-06-18T09:31:11+02:00</datafileCreateTime>
<name>dup_test_date.dat</name>
<dataset ref="Dataset_001"/>
</datafile>
</data>
</icatdata>
"""
@pytest.mark.parametrize("inputdata", [
ingest_data_string,
ingest_data_int,
ingest_data_boolean,
ingest_data_float,
ingest_data_date,
])
def test_ingest_duplicate_check_types(tmpdirsec, dataset, cmdargs, inputdata):
"""Ingest with a collision of a duplicate object.
Similar to test_ingest_duplicate_check_ok(), but trying several
input datasets that test different data types. Issue #9.
"""
# Most input data create a datafile or a dataset parameter related
# to dataset and thus assume the dataset to already exist. Only
# ingest_data_boolean creates the dataset itself.
if inputdata is not ingest_data_boolean:
dataset.create()
# We simply ingest twice the same data, using duplicate=CHECK the
# second time. This obviously leads to matching duplicates.
inpfile = tmpdirsec / "ingest.xml"
with inpfile.open("wt") as f:
f.write(inputdata)
args = cmdargs + ["-i", str(inpfile)]
callscript("icatingest.py", args)
callscript("icatingest.py", args + ["--duplicate", "CHECK"])
def test_ingest_datafiles(tmpdirsec, client, dataset, cmdargs):
    """Ingest a dataset with some datafiles.

    The datafiles are only registered as objects in ICAT; no file
    content is uploaded to IDS (contrast test_ingest_datafiles_upload).
    """
    dummyfiles = [ f['dfname'] for f in testdatafiles ]
    args = cmdargs + ["-i", datafiles]
    callscript("icatingest.py", args)
    # Verify that the Datafile objects have been created in ICAT.
    # Since nothing was uploaded to IDS, location must be NULL.
    dataset = client.searchMatching(dataset)
    for fname in dummyfiles:
        query = Query(client, "Datafile", conditions={
            "name": "= '%s'" % fname,
            "dataset.id": "= %d" % dataset.id,
        })
        df = client.assertedSearch(query)[0]
        assert df.location is None
def test_ingest_datafiles_upload(tmpdirsec, client, dataset, cmdargs):
"""Upload datafiles to IDS from icatingest.
Same as last test, but set the --upload-datafiles flag so that
icatingest will not create the datafiles as objects in the ICAT,
but upload the files to IDS instead.
"""
dummyfiles = [ DummyDatafile(tmpdirsec, f['dfname'], f['size'], f['mtime'])
for f in testdatafiles ]
args = cmdargs + ["-i", datafiles, "--upload-datafiles",
"--datafile-dir", str(tmpdirsec)]
callscript("icatingest.py", args)
# Verify that the datafiles have been uploaded.
dataset = client.searchMatching(dataset)
for f in dummyfiles:
query = Query(client, "Datafile", conditions={
"name": "= '%s'" % f.name,
"dataset.id": "= %d" % dataset.id,
})
df = client.assertedSearch(query)[0]
assert df.location is not None
assert df.fileSize == f.size
assert df.checksum == f.crc32
if f.mtime:
assert df.datafileModTime == f.mtime<|fim▁end|>
| |
<|file_name|>subtrees_comparer.py<|end_file_name|><|fim▁begin|>def subtrees_equal(expected_schema_node, actual_node):
if expected_schema_node[0] != actual_node.get_name():
return False
if expected_schema_node[1] != actual_node.get_state():
return False
expected_children = expected_schema_node[2]
actual_children = actual_node.get_children()
actual_children_names = [child.get_name() for child in actual_children]
actual_children_names.sort()
if len(expected_children) != len(actual_children_names):<|fim▁hole|> zip(expected_children, actual_children_names):
subtrees_equal(
expected_child, actual_node.get_child(actual_child_name))
return True<|fim▁end|>
|
return False
for (expected_child, actual_child_name) in \
|
<|file_name|>GenericTIFFSeriesMaximumIntensityProjectionGenerationAlgorithm.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
'''
Created on Apr 27, 2016
@author: Aaron Ponti
'''
from ch.systemsx.cisd.openbis.dss.etl.dto.api.impl import MaximumIntensityProjectionGenerationAlgorithm
class GenericTIFFSeriesMaximumIntensityProjectionGenerationAlgorithm(MaximumIntensityProjectionGenerationAlgorithm):
'''
Custom MaximumIntensityProjectionGenerationAlgorithm for Generic TIFF Series
that makes sure that the first timepoint in a series is registered for
creation of the representative thumbnail.
'''
def __init__(self, datasetTypeCode, width, height, filename):
"""
Constructor
"""
<|fim▁hole|>
def imageToBeIgnored(self, image):
"""
Overrides the parent imageToBeIgnored method. The selection of which
series should be used to create the representative thumbnail is done
in GenericTIFFSeriesCompositeDatasetConfig. Here we prevent the base
MaximumIntensityProjectionGenerationAlgorithm.imageToBeIgnored() method
to make a decision based on the timepoint (== 0), since we cannot know
which is the first time point in a Generic TIFF Series.
"""
return False<|fim▁end|>
|
# Call the parent base constructor
MaximumIntensityProjectionGenerationAlgorithm.__init__(self,
datasetTypeCode, width, height, filename)
|
<|file_name|>timer.rs<|end_file_name|><|fim▁begin|>// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
use base::prelude::*;
use core::{mem};
use cty::{c_int, itimerspec, TFD_TIMER_ABSTIME};
use syscall::{close, timerfd_settime, timerfd_gettime, read};
use fd::{FdContainer};
use rv::{retry};
use super::{Time, time_to_timespec, time_from_timespec};
/// A timer.
pub struct Timer {
fd: c_int,
owned: bool,
}
impl Timer {
    /// Disables the timer.
    ///
    /// A disarmed timer produces no further expirations until one of the
    /// `interval*` or `once_*` methods re-arms it.
    pub fn disable(&self) -> Result {
        // An all-zero itimerspec disarms the timerfd.
        let arg = mem::zeroed();
        rv!(timerfd_settime(self.fd, 0, &arg, None))
    }

    /// Sets the timer to expire every `iv` time units.
    pub fn interval(&self, iv: Time) -> Result {
        // it_value == it_interval: the first expiration happens after one
        // full interval, then the timer keeps repeating.
        let arg = itimerspec {
            it_interval: time_to_timespec(iv),
            it_value: time_to_timespec(iv),
        };
        rv!(timerfd_settime(self.fd, 0, &arg, None))
    }

    /// Sets the timer to expire every `iv` time units, starting at the absolute `start`.
    pub fn interval_from(&self, iv: Time, start: Time) -> Result {
        let arg = itimerspec {
            it_interval: time_to_timespec(iv),
            it_value: time_to_timespec(start),
        };
        // TFD_TIMER_ABSTIME makes it_value an absolute point in time.
        rv!(timerfd_settime(self.fd, TFD_TIMER_ABSTIME, &arg, None))
    }

    /// Sets the timer to expire every `iv` time units, starting in `when` units.
    pub fn interval_in(&self, iv: Time, when: Time) -> Result {
        let arg = itimerspec {
            it_interval: time_to_timespec(iv),
            it_value: time_to_timespec(when),
        };
        rv!(timerfd_settime(self.fd, 0, &arg, None))
    }

    /// Sets the timer to expire once at the absolute `when`.
    pub fn once_at(&self, when: Time) -> Result {
        // Zero interval: the timer fires a single time and does not repeat.
        let arg = itimerspec {
            it_interval: mem::zeroed(),
            it_value: time_to_timespec(when),
        };
        rv!(timerfd_settime(self.fd, TFD_TIMER_ABSTIME, &arg, None))
    }

    /// Sets the timer to expire in `when` time units.
    pub fn once_in(&self, when: Time) -> Result {
        let arg = itimerspec {
            it_interval: mem::zeroed(),
            it_value: time_to_timespec(when),
        };
        rv!(timerfd_settime(self.fd, 0, &arg, None))
    }

    /// Returns the status of the timer.
    ///
    /// As reported by timerfd_gettime: the first element is the current
    /// interval, the second is the time until the next expiration.
    ///
    /// TODO: Document this.
    pub fn status(&self) -> Result<(Time, Time)> {
        let mut arg = mem::zeroed();
        try!(rv!(timerfd_gettime(self.fd, &mut arg)));
        Ok((time_from_timespec(arg.it_interval), time_from_timespec(arg.it_value)))
    }

    /// Returns the number of times the timer expired since this function was last called.
    pub fn ticks(&self) -> Result<u64> {
        // Reading a timerfd yields a u64 expiration count; the kernel resets
        // the counter on read, so each call reports only new expirations.
        let mut buf = 0;
        try!(retry(|| read(self.fd, buf.as_mut())));
        Ok(buf)
    }
}
<|fim▁hole|>impl Drop for Timer {
fn drop(&mut self) {
if self.owned {
close(self.fd);
}
}
}
impl Into<c_int> for Timer {
fn into(self) -> c_int {
let fd = self.fd;
mem::forget(self);
fd
}
}
impl FdContainer for Timer {
fn is_owned(&self) -> bool {
self.owned
}
fn borrow(&self) -> c_int {
self.fd
}
fn from_owned(fd: c_int) -> Timer {
Timer { fd: fd, owned: true }
}
fn from_borrowed(fd: c_int) -> Timer {
Timer { fd: fd, owned: false }
}
}<|fim▁end|>
| |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import profiles.urls
import accounts.urls
from . import views<|fim▁hole|> url(r'^$', views.HomePage.as_view(), name='home'),
url(r'^users/', include(profiles.urls, namespace='profiles')),
url(r'^admin/', include(admin.site.urls)),
url(r'^', include(accounts.urls, namespace='accounts')),
url(r'^post_url/$', views.HomePage.as_view(), name='post')
]
# User-uploaded files like profile pics need to be served in development
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# Include django debug toolbar if DEBUG is on
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]<|fim▁end|>
|
urlpatterns = [
|
<|file_name|>ForumContainer.js<|end_file_name|><|fim▁begin|>import React, {Component, PropTypes} from 'react';
import * as actions from './ForumAction';
import ForumPage from './ForumPage';
class ForumContainer extends Component {
constructor(props) {
super(props);
this.state = {
questions: []
};<|fim▁hole|> this.postQuestion = this.postQuestion.bind(this);
}
postQuestion(model) {
actions.postQuestion(model).then(response => {
const questions = this.state.questions.concat([response]);
this.setState({questions});
});
}
componentDidMount() {
actions.getQuestions().then(response => {
this.setState({questions: response});
});
}
render() {
return <ForumPage {...this.state} postQuestion={this.postQuestion}/>;
}
}
export default ForumContainer;<|fim▁end|>
| |
<|file_name|>contact-us.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core';
<|fim▁hole|> templateUrl: './contact-us.component.html',
styleUrls: ['./contact-us.component.less']
})
export class ContactUsComponent implements OnInit {
constructor() {
// Do stuff
}
ngOnInit() {
}
}<|fim▁end|>
|
@Component({
selector: 'nymcp-contact-us',
|
<|file_name|>app.js<|end_file_name|><|fim▁begin|>'use strict';
jQuery(document).ready(function ($) {
var lastId,
topMenu = $("#top-navigation"),
topMenuHeight = topMenu.outerHeight(),
// All list items
menuItems = topMenu.find("a"),
// Anchors corresponding to menu items
scrollItems = menuItems.map(function () {
var item = $($(this).attr("href"));
if (item.length) {
return item;
}
});
//Get width of container
var containerWidth = $('.section .container').width();
//Resize animated triangle
$(".triangle").css({
"border-left": containerWidth / 2 + 'px outset transparent',
"border-right": containerWidth / 2 + 'px outset transparent'
});
//resize
$(window).resize(function () {
containerWidth = $('.container').width();
$(".triangle").css({
"border-left": containerWidth / 2 + 'px outset transparent',
"border-right": containerWidth / 2 + 'px outset transparent'
});
});
//mask
$('.has-mask').each( function() {
var $this = $( this ),
mask = $this.data( 'aria-mask' );
$this.mask( mask );
});
//Initialize header slider.
$('#da-slider').cslider();
//Initial mixitup, used for animated filtering portgolio.
$('#portfolio-grid').mixitup({
'onMixStart': function (config) {
$('div.toggleDiv').hide();
}
});
//Initial Out clients slider in client section
$('#clint-slider').bxSlider({
pager: false,
minSlides: 1,
maxSlides: 5,
moveSlides: 2,
slideWidth: 210,
slideMargin: 25,
prevSelector: $('#client-prev'),
nextSelector: $('#client-next'),
prevText: '<i class="icon-left-open"></i>',
nextText: '<i class="icon-right-open"></i>'
});
$('input, textarea').placeholder();
// Bind to scroll
$(window).scroll(function () {
//Display or hide scroll to top button
if ($(this).scrollTop() > 100) {
$('.scrollup').fadeIn();
} else {
$('.scrollup').fadeOut();
}
if ($(this).scrollTop() > 100) {
$('.navbar').addClass('navbar-fixed-top animated fadeInDown');
} else {
$('.navbar').removeClass('navbar-fixed-top animated fadeInDown');
}
// Get container scroll position
var fromTop = $(this).scrollTop() + topMenuHeight + 10;
// Get id of current scroll item
var cur = scrollItems.map(function () {
if ($(this).offset().top < fromTop)
return this;
});
// Get the id of the current element
cur = cur[cur.length - 1];
var id = cur && cur.length ? cur[0].id : "";
if (lastId !== id) {
lastId = id;
// Set/remove active class<|fim▁hole|> .parent().removeClass("active")
.end().filter("[href=#" + id + "]").parent().addClass("active");
}
});
/*
Function for scroliing to top
************************************/
$('.scrollup').click(function () {
$("html, body").animate({
scrollTop: 0
}, 600);
return false;
});
/*
 Send newsletter subscription: validate the e-mail locally, then POST
 it and show success / error feedback.
**********************************************************************/
$('#subscribe').click(function () {
    var error = false;
    // Dot before the TLD is escaped so it only matches a literal '.'
    // (the original unescaped '.' matched any character, letting
    // addresses such as "a@bXcom" pass validation).
    var emailCompare = /^([a-z0-9_.-]+)@([0-9a-z.-]+)\.([a-z.]{2,6})$/;
    var email = $('input#nlmail').val().toLowerCase(); // normalised input value
    if (email == "" || email == " " || !emailCompare.test(email)) {
        // Flash the inline validation message for ~4 seconds.
        $('#err-subscribe').show(500);
        $('#err-subscribe').delay(4000);
        $('#err-subscribe').animate({
            height: 'toggle'
        }, 500, function () {
            // Animation complete.
        });
        error = true; // block the submission
    }
    if (error === false) {
        $.ajax({
            type: 'POST',
            url: 'php/newsletter.php',
            data: {
                email: $('#nlmail').val()
            },
            error: function (request, error) {
                alert("An error occurred");
            },
            success: function (response) {
                if (response == 'OK') {
                    $('#success-subscribe').show();
                    $('#nlmail').val('')
                } else {
                    alert("An error occurred");
                }
            }
        });
    }
    return false; // keep the browser on the page
});
/*
 Send mail: validate the contact form fields, then POST the form via
 AJAX and show success / error feedback.
**********************************************************************/
$("#send-mail").click(function () {
    var error = false;

    // Helper: flash an inline validation message for ~4 seconds
    // (the original repeated this show/delay/animate sequence three times).
    function flashError(selector) {
        $(selector).show(500);
        $(selector).delay(4000);
        $(selector).animate({
            height: 'toggle'
        }, 500, function () {
            // Animation complete.
        });
    }

    var name = $('input#name').val(); // get the value of the input field
    if (name == "" || name == " ") {
        flashError('#err-name');
        error = true;
    }

    // Digits are now allowed in the domain ([\da-z.-]) and the dot
    // before the TLD is escaped: the original pattern had a literal 'd'
    // (rejecting domains such as "123mail.com") and an unescaped '.'
    // that matched any character.
    var emailCompare = /^([a-z0-9_.-]+)@([\da-z.-]+)\.([a-z.]{2,6})$/;
    var email = $('input#email').val().toLowerCase();
    if (email == "" || email == " " || !emailCompare.test(email)) {
        flashError('#err-email');
        error = true;
    }

    var comment = $('textarea#comment').val();
    if (comment == "" || comment == " ") {
        flashError('#err-comment');
        error = true;
    }

    if (error == false) {
        var dataString = $('#contact-form').serialize(); // Collect data from form
        $.ajax({
            type: "POST",
            url: $('#contact-form').attr('action'),
            data: dataString,
            timeout: 6000,
            error: function (request, error) {
            },
            success: function (response) {
                response = $.parseJSON(response);
                if (response.success) {
                    $('#successSend').show();
                    $("#name").val('');
                    $("#email").val('');
                    $("#comment").val('');
                } else {
                    $('#errorSend').show();
                }
            }
        });
        return false;
    }
    return false; // stops user browser being directed to the php file
});
//Function for show or hide portfolio description.
// jQuery plugin: clicking a matched element toggles the panel named by
// its `rel` attribute; every other .toggleDiv panel is slid closed
// first so at most one description is open at a time.
$.fn.showHide = function (options) {
    var defaults = {
        speed: 1000,
        easing: '',
        changeText: 0,       // 1 = swap the trigger's text between showText/hideText
        showText: 'Show',
        hideText: 'Hide'
    };
    // NOTE: $.extend(defaults, options) mutates `defaults`; harmless
    // because `defaults` is re-created on every plugin call.
    var options = $.extend(defaults, options);
    $(this).click(function () {
        $('.toggleDiv').slideUp(options.speed, options.easing);
        var toggleClick = $(this);
        var toggleDiv = $(this).attr('rel'); // selector of the panel to toggle
        $(toggleDiv).slideToggle(options.speed, options.easing, function () {
            if (options.changeText == 1) {
                $(toggleDiv).is(":visible") ? toggleClick.text(options.hideText) : toggleClick.text(options.showText);
            }
        });
        return false; // suppress the default link navigation
    });
};
//Initial Show/Hide portfolio element.
$('div.toggleDiv').hide();
/************************
 Animate elements
*************************/
// Helper: toggle an animation class on elements as they scroll into /
// out of the viewport (jquery.inview plugin). When `once` is true the
// handler fires only on the first inview event per element. This
// replaces five copy-pasted, otherwise-identical handlers.
function animateOnInview(selector, animationClass, once) {
    jQuery(selector)[once ? 'one' : 'bind']('inview', function (event, visible) {
        if (visible == true) {
            jQuery(this).addClass(animationClass);
        } else {
            jQuery(this).removeClass(animationClass);
        }
    });
}
//Animate thumbnails (first appearance only)
animateOnInview('.thumbnail', 'animated fadeInDown', true);
//Animate triangles
animateOnInview('.triangle', 'animated fadeInDown', false);
//Animate team members
animateOnInview('#first-person, #second-person, #third-person', 'animated pulse', false);
//Animate price columns and testimonials
animateOnInview('.price-column, .testimonial', 'animated fadeInDown', false);
//Animate contact form
animateOnInview('.contact-form', 'animated bounceIn', false);
//Animate skill bars: grow each bar to its data-width the first time it
//becomes visible.
jQuery('.skills > li > span').one('inview', function (event, visible) {
    if (visible == true) {
        jQuery(this).each(function () {
            jQuery(this).animate({
                width: jQuery(this).attr('data-width')
            }, 3000);
        });
    }
});
//initialize prettyPhoto lightbox for gallery links
$("a[rel^='prettyPhoto']").prettyPhoto();
//contact form
// POST the serialized course-info form and only confirm to the user
// once the request actually succeeds. The original alerted
// unconditionally right after firing the async request, claiming
// success even when the POST later failed.
function enviainfocurso()
{
    $.ajax({
        type: "POST",
        url: "php/phpscripts/infocurso.php",
        async: true,
        data: $('form.informacao').serialize(),
        success: function () {
            alert('Entraremos em breve em contato com você');
        },
        error: function () {
            alert('Não foi possível enviar. Tente novamente.');
        }
    });
}
//analytics
// Standard Google (Universal) Analytics bootstrap snippet: defines the
// ga() command queue on window and asynchronously injects analytics.js.
(function (i, s, o, g, r, a, m) {
    i['GoogleAnalyticsObject'] = r;
    i[r] = i[r] || function () {
        (i[r].q = i[r].q || []).push(arguments)
    }, i[r].l = 1 * new Date();
    a = s.createElement(o),
    m = s.getElementsByTagName(o)[0];
    a.async = 1;
    a.src = g;
    m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', 'UA-62932901-1', 'auto');
ga('send', 'pageview');
}); // end ready()
$(window).load(function () {
// Normalise a pathname for same-page comparison: strip the leading
// slash, a trailing "index.<ext>" / "default.<ext>" document name, and
// any trailing slash. The dot before the extension is escaped so it
// only matches a literal '.' — the original unescaped '.' matched any
// character (e.g. "indexhtml" was wrongly stripped).
function filterPath(string) {
    return string.replace(/^\//, '').replace(/(index|default)\.[a-zA-Z]{3,4}$/, '').replace(/\/$/, '');
}
// Attach smooth-scroll behaviour to every same-page anchor link.
// Attribute selector values are quoted ([href*="#"], [name="…"]):
// unquoted values are rejected with a syntax error by modern
// jQuery/Sizzle, while quoted values work on all versions.
$('a[href*="#"]').each(function () {
    // Only enhance links that stay on this page/host and have a hash.
    if (filterPath(location.pathname) == filterPath(this.pathname) && location.hostname == this.hostname && this.hash.replace(/#/, '')) {
        var $targetId = $(this.hash),
            $targetAnchor = $('[name="' + this.hash.slice(1) + '"]');
        var $target = $targetId.length ? $targetId : $targetAnchor.length ? $targetAnchor : false;
        if ( $target ) {
            $(this).click(function (e) {
                e.preventDefault();
                //Hack collapse top navigation after clicking
                // topMenu.parent().attr('style', 'height:0px').removeClass('in'); //Close navigation
                $('.navbar .btn-navbar').addClass('collapsed');
                // Offset by the fixed navbar height (63px) so the target
                // is not hidden underneath it.
                var targetOffset = $target.offset().top - 63;
                $('html, body').animate({
                    scrollTop: targetOffset
                }, 800);
                return false;
            });
        }
    }
});
});
//Initialize google map for contact section with your location.
// Renders a static (non-draggable, no-UI) Google Map centred slightly
// west of the marker so the marker sits right of centre.
function initializeMap() {
    if( $("#map-canvas").length ) {
        var lat = '-8.0618743';//-8.055967'; //Set your latitude.
        var lon = '-34.8734548';//'-34.896303'; //Set your longitude.
        // Shift the centre ~0.0105 degrees west of the marker.
        // NOTE(review): lon is a string, so `lon - 0.0105` relies on JS
        // numeric coercion — confirm before refactoring.
        var centerLon = lon - 0.0105;
        var myOptions = {
            scrollwheel: false,
            draggable: false,
            disableDefaultUI: true,
            center: new google.maps.LatLng(lat, centerLon),
            zoom: 15,
            mapTypeId: google.maps.MapTypeId.ROADMAP
        };
        //Bind map to element with id map-canvas
        var map = new google.maps.Map(document.getElementById('map-canvas'), myOptions);
        var marker = new google.maps.Marker({
            map: map,
            position: new google.maps.LatLng(lat, lon),
        });
        //var infowindow = new google.maps.InfoWindow();
        //google.maps.event.addListener(marker, 'click', function () {
        //    infowindow.open(map, marker);
        //});
        // infowindow.open(map, marker);
    }
}
|
menuItems
|
<|file_name|>ranges_iter.rs<|end_file_name|><|fim▁begin|>// Copyright 2019 TiKV Project Authors. Licensed under Apache-2.0.<|fim▁hole|>use super::range::Range;
/// The outcome of one step of [`RangesIterator::next`].
#[derive(PartialEq, Eq, Clone, Debug)]
pub enum IterStatus {
    /// All ranges are consumed.
    Drained,
    /// Last range is drained or this iteration is a fresh start so that caller should scan
    /// on a new range.
    NewRange(Range),
    /// Last interval range is not drained and the caller should continue scanning without changing
    /// the scan range.
    Continue,
}
/// An iterator like structure that produces user key ranges.
///
/// For each `next()`, it produces one of the following:
/// - a new range
/// - a flag indicating continuing last interval range
/// - a flag indicating that all ranges are consumed
///
/// If a new range is returned, caller can then scan unknown amount of key(s) within this new range.
/// The caller must inform the structure so that it will emit a new range next time by calling
/// `notify_drained()` after current range is drained. Multiple `notify_drained()` without `next()`
/// will have no effect.
pub struct RangesIterator {
    /// Whether or not we are processing a valid range. If we are not processing a range, or there
    /// is no range any more, this field is `false`.
    in_range: bool,
    /// The remaining ranges, yielded in their original order.
    iter: std::vec::IntoIter<Range>,
}
impl RangesIterator {
#[inline]
pub fn new(user_key_ranges: Vec<Range>) -> Self {
Self {
in_range: false,
iter: user_key_ranges.into_iter(),
}
}
/// Continues iterating.
#[inline]
pub fn next(&mut self) -> IterStatus {
if self.in_range {
return IterStatus::Continue;
}
match self.iter.next() {
None => IterStatus::Drained,
Some(range) => {
self.in_range = true;
IterStatus::NewRange(range)
}
}
}
/// Notifies that current range is drained.
#[inline]
pub fn notify_drained(&mut self) {
self.in_range = false;
}
}
#[cfg(test)]
mod tests {
use super::super::range::IntervalRange;
use super::*;
use std::sync::atomic;
static RANGE_INDEX: atomic::AtomicU64 = atomic::AtomicU64::new(1);
    /// Builds a unique `[v, v + 2)` interval range from a process-wide
    /// atomic counter, so every call returns a distinct, ordered range.
    fn new_range() -> Range {
        use byteorder::{BigEndian, WriteBytesExt};
        let v = RANGE_INDEX.fetch_add(2, atomic::Ordering::SeqCst);
        let mut r = IntervalRange::from(("", ""));
        r.lower_inclusive.write_u64::<BigEndian>(v).unwrap();
        r.upper_exclusive.write_u64::<BigEndian>(v + 2).unwrap();
        Range::Interval(r)
    }
#[test]
fn test_basic() {
// Empty
let mut c = RangesIterator::new(vec![]);
assert_eq!(c.next(), IterStatus::Drained);
assert_eq!(c.next(), IterStatus::Drained);
c.notify_drained();
assert_eq!(c.next(), IterStatus::Drained);
assert_eq!(c.next(), IterStatus::Drained);
// Non-empty
let ranges = vec![new_range(), new_range(), new_range()];
let mut c = RangesIterator::new(ranges.clone());
assert_eq!(c.next(), IterStatus::NewRange(ranges[0].clone()));
assert_eq!(c.next(), IterStatus::Continue);
assert_eq!(c.next(), IterStatus::Continue);
c.notify_drained();
assert_eq!(c.next(), IterStatus::NewRange(ranges[1].clone()));
assert_eq!(c.next(), IterStatus::Continue);
assert_eq!(c.next(), IterStatus::Continue);
c.notify_drained();
c.notify_drained(); // multiple consumes will not take effect
assert_eq!(c.next(), IterStatus::NewRange(ranges[2].clone()));
c.notify_drained();
assert_eq!(c.next(), IterStatus::Drained);
c.notify_drained();
assert_eq!(c.next(), IterStatus::Drained);
}
}<|fim▁end|>
| |
<|file_name|>ValueFactory.java<|end_file_name|><|fim▁begin|>/* $Id$
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.etch.bindings.java.msg;
import java.util.Set;
import org.apache.etch.bindings.java.msg.Validator.Level;
/**
* Interface which defines the value factory which helps
* the idl compiler serialize and deserialize messages,
* convert values, etc.
*/
public interface ValueFactory
{
//////////
// Type //
//////////
/**
* Translates a type id into the appropriate Type object. If the type does
* not exist, and if dynamic typing is enabled, adds it to the dynamic types.
* @param id a type id.
* @return id translated into the appropriate Type.
*/
public Type getType( Integer id );
/**
* Translates a type name into the appropriate Type object. If the type does
* not exist, and if dynamic typing is enabled, adds it to the dynamic types.
* @param name a type name.
* @return name translated into the appropriate Type.
*/
public Type getType( String name );
/**
* Adds the type if it doesn't already exist. Use this to dynamically add
* types to a ValueFactory. The type is per instance of the ValueFactory,
* not global. Not available if dynamic typing is locked.
* @param type
*/
public void addType( Type type );
/**
* Locks the dynamic typing so that no new types may be created by addType
* or getType.
*/
public void lockDynamicTypes();
/**
* Unlocks the dynamic typing so that new types may be created by addType
* or getType.
*/
public void unlockDynamicTypes();
/**
* @return a collection of all the types.
*/
public Set<Type> getTypes();
/////////////////////
// STRING ENCODING //
/////////////////////
/**
* @return the encoding to use for strings.
*/
public String getStringEncoding();
////////////////
// MESSAGE ID //
////////////////
/**
* @param msg the message whose well-known message-id field is to be
* returned.
* @return the value of the well-known message-id field. This is a
* unique identifier for this message on a particular transport
* during a particular session. If there is no well-known message-id
* field defined, or if the message-id field has not been set, then
* return null.
*/
public Long getMessageId( Message msg );
/**
* Sets the value of the well-known message-id field. This is a
* unique identifier for this message on a particular transport
* during a particular session. If there is no well-known message-id
* field defined then nothing is done. If msgid is null, then the
* field is cleared.
* @param msg the message whose well-known message-id field is to
* be set.
* @param msgid the value of the well-known message-id field.
*/
public void setMessageId( Message msg, Long msgid );
/**
* @return well-known message field for message id.
*/
public Field get_mf__messageId();
/////////////////
// IN REPLY TO //
/////////////////
/**
* @param msg the message whose well-known in-reply-to field is to
* be returned.
* @return the value of the in-reply-to field, or null if there is
* none or if there is no such field defined.
*/
public Long getInReplyTo( Message msg );
<|fim▁hole|> * @param msgid the value of the well-known in-reply-to field. If
* there is no well-known in-reply-to field defined then nothing
* is done. If msgid is null, then the field is cleared.
*/
public void setInReplyTo( Message msg, Long msgid );
/**
* @return well-known message field for in reply to.
*/
public Field get_mf__inReplyTo();
//////////////////////
// VALUE CONVERSION //
//////////////////////
/**
* Converts a value to a struct value representation to be exported
* to a tagged data output.
* @param value a custom type defined by a service, or a well-known
* standard type (e.g., date).
* @return a struct value representing the value.
* @throws UnsupportedOperationException if the type cannot be exported.
*/
public StructValue exportCustomValue( Object value )
throws UnsupportedOperationException;
/**
* Converts a struct value imported from a tagged data input to
* a normal type.
* @param struct a struct value representation of a custom type, or a
* well known standard type.
* @return a custom type, or a well known standard type.
* @throws UnsupportedOperationException if the type cannot be imported.
*/
public Object importCustomValue( StructValue struct )
throws UnsupportedOperationException;
/**
* @param c the class of a custom value.
* @return the struct type of a custom value class.
* @throws UnsupportedOperationException
* @see #exportCustomValue(Object)
*/
public Type getCustomStructType( Class<?> c )
throws UnsupportedOperationException;
/**
* @return well-known message type for exception thrown by one-way
* message.
*/
public Type get_mt__exception();
/**
* @return the validation level of field StructValue.put and TaggedDataOutput.
*/
public Level getLevel();
/**
* Sets the validation level of field StructValue.put and TaggedDataOutput.
* @param level
* @return the old value
*/
public Level setLevel( Level level );
}<|fim▁end|>
|
/**
* @param msg the message whose well-known in-reply-to field is to
* be set.
|
<|file_name|>basic.rs<|end_file_name|><|fim▁begin|>use ci::{build, Command, Sandbox};
use std::path::Path;
use std::{process, fs};
<|fim▁hole|>{
/// Runs `command` synchronously inside `working_dir` (creating the
/// directory first if needed) and captures its stdout and exit code.
fn run(&mut self, command: Command, working_dir: &Path) -> build::TaskOutput {
    // Make sure the working directory exists before spawning anything.
    if !working_dir.exists() {
        fs::create_dir_all(working_dir).expect("could not create ci directory");
    }
    let raw = process::Command::new(&command.executable)
        .args(&command.arguments)
        .current_dir(working_dir)
        .output()
        .expect("could not spawn command");
    build::TaskOutput {
        // FIXME: grab stderr
        output: raw.stdout,
        // A missing exit code (e.g. killed by a signal) is reported as 0.
        result_code: raw.status.code().unwrap_or(0) as _,
    }
}
}<|fim▁end|>
|
pub struct Basic;
impl Sandbox for Basic
|
<|file_name|>race_storage.py<|end_file_name|><|fim▁begin|>import copy
import secrets
<|fim▁hole|> '🐶': 0xccd6dd,
'🐱': 0xffcb4e,
'🐭': 0x99aab5,
'🐰': 0x99aab5,
'🐙': 0x9266cc,
'🐠': 0xffcc4d,
'🦊': 0xf4900c,
'🦀': 0xbe1931,
'🐸': 0x77b255,
'🐧': 0xf5f8fa
}
# Human-readable animal name for each participant icon.
names = {
    '🐶': 'dog',
    '🐱': 'cat',
    '🐭': 'mouse',
    '🐰': 'rabbit',
    '🐙': 'octopus',
    '🐠': 'fish',
    '🦊': 'fox',
    '🦀': 'crab',
    '🐸': 'frog',
    '🐧': 'penguin'
}
# Pool of icons a race hands out to its participants (one per user).
participant_icons = ['🐶', '🐱', '🐭', '🐰', '🐙', '🐠', '🦊', '🦀', '🐸', '🐧']
def make_race(channel_id, buyin):
    """Register a fresh race for the given channel.

    Seeds the race with its own copy of the participant icon pool so
    icons can be removed as users join without affecting other races.

    :param channel_id: id of the channel hosting the race.
    :param buyin: amount each participant pays to enter.
    """
    race_data = {
        # Shallow copy is sufficient: the pool holds immutable strings
        # (the original used an unnecessary copy.deepcopy).
        'icons': list(participant_icons),
        'users': [],
        'buyin': buyin
    }
    races[channel_id] = race_data
def add_participant(channel_id, user):
    """Add ``user`` to the race running in ``channel_id``.

    Picks a random icon from the race's remaining pool (so no two
    participants share an icon), records the user, and returns the
    assigned icon.
    """
    race = races[channel_id]
    icons = race['icons']
    users = race['users']
    # Draw a random, still-unused icon and remove it from the pool.
    usr_icon = secrets.choice(icons)
    icons.remove(usr_icon)
    race.update({'icons': icons})
    participant_data = {
        'user': user,
        'icon': usr_icon
    }
    users.append(participant_data)
    race.update({'users': users})
    races.update({channel_id: race})
    return usr_icon<|fim▁end|>
|
races = {}
colors = {
|
<|file_name|>0005_auto__del_field_artistmedia_is_default_image__del_field_artistmedia_na.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'ArtistMedia.is_default_image'
db.delete_column(u'artist_artistmedia', 'is_default_image')
# Deleting field 'ArtistMedia.name'
db.delete_column(u'artist_artistmedia', 'name')
# Deleting field 'ArtistMedia.video_link'
db.delete_column(u'artist_artistmedia', 'video_link')
# Deleting field 'ArtistMedia.full_res_image'
db.delete_column(u'artist_artistmedia', 'full_res_image')
# Deleting field 'ArtistMedia.image'
db.delete_column(u'artist_artistmedia', 'image')
# Deleting field 'ArtistMedia.id'
db.delete_column(u'artist_artistmedia', u'id')
# Deleting field 'ArtistMedia.thumbnail'
db.delete_column(u'artist_artistmedia', 'thumbnail')
# Adding field 'ArtistMedia.frontmedia_ptr'
db.add_column(u'artist_artistmedia', u'frontmedia_ptr',
self.gf('django.db.models.fields.related.OneToOneField')(default=-1, to=orm['front_material.FrontMedia'], unique=True, primary_key=True),
keep_default=False)
    def backwards(self, orm):
        """Revert: restore ArtistMedia's own media columns and primary
        key, and drop the FrontMedia parent pointer."""
        # Adding field 'ArtistMedia.is_default_image'
        db.add_column(u'artist_artistmedia', 'is_default_image',
                      self.gf('django.db.models.fields.BooleanField')(default=False),
                      keep_default=False)
        # Adding field 'ArtistMedia.name'
        db.add_column(u'artist_artistmedia', 'name',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=100),
                      keep_default=False)
        # Adding field 'ArtistMedia.video_link'
        db.add_column(u'artist_artistmedia', 'video_link',
                      self.gf('django.db.models.fields.CharField')(default='', max_length=255, null=True, blank=True),
                      keep_default=False)
        # Adding field 'ArtistMedia.full_res_image'
        db.add_column(u'artist_artistmedia', 'full_res_image',
                      self.gf('django.db.models.fields.files.ImageField')(default='', max_length=100, null=True, blank=True),
                      keep_default=False)
        # Adding field 'ArtistMedia.image'
        db.add_column(u'artist_artistmedia', 'image',
                      self.gf('django.db.models.fields.files.ImageField')(default='', max_length=100, null=True, blank=True),
                      keep_default=False)
        # Adding field 'ArtistMedia.id'
        db.add_column(u'artist_artistmedia', u'id',
                      self.gf('django.db.models.fields.AutoField')(default=1, primary_key=True),
                      keep_default=False)
        # Adding field 'ArtistMedia.thumbnail'
        db.add_column(u'artist_artistmedia', 'thumbnail',
                      self.gf('django.db.models.fields.files.ImageField')(default='', max_length=100, null=True, blank=True),
                      keep_default=False)
        # Deleting field 'ArtistMedia.frontmedia_ptr'
        db.delete_column(u'artist_artistmedia', u'frontmedia_ptr_id')
models = {
u'artist.artist': {
'Meta': {'object_name': 'Artist'},
'artist_statement': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
'bio': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'slug': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'artist.artistmedia': {
'Meta': {'object_name': 'ArtistMedia', '_ormbases': [u'front_material.FrontMedia']},
'artist': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['artist.Artist']"}),
u'frontmedia_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['front_material.FrontMedia']", 'unique': 'True', 'primary_key': 'True'})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},<|fim▁hole|> u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'front_material.frontmedia': {
'Meta': {'object_name': 'FrontMedia'},
'full_res_image': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'is_default_image': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100'}),
'thumbnail': ('django.db.models.fields.files.ImageField', [], {'default': "''", 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'video_link': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['artist']<|fim▁end|>
| |
<|file_name|>index.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|> console.log('Razzle comes with React! This package is a stub.');
return config;
};<|fim▁end|>
|
module.exports = function modify(config) {
|
<|file_name|>test_gravmag_euler.py<|end_file_name|><|fim▁begin|>from __future__ import division
import numpy as np
from fatiando.gravmag.euler import Classic, ExpandingWindow, MovingWindow
from fatiando.gravmag import sphere, fourier
from fatiando.mesher import Sphere
from fatiando import utils, gridder
model = None
xp, yp, zp = None, None, None
inc, dec = None, None
struct_ind = None
base = None
pos = None
field, xderiv, yderiv, zderiv = None, None, None, None
precision = 0.01
def setup():
    """Build the shared synthetic dataset used by every test.

    Creates a single magnetized sphere model, computes its total-field
    anomaly (plus a constant base level) on a regular grid, and the
    three spatial derivatives required by Euler deconvolution.

    NOTE(review): the module predeclares xp/yp/zp but the tests use the
    x/y/z globals created here — confirm which names are intended.
    """
    global model, x, y, z, inc, dec, struct_ind, field, xderiv, yderiv, \
        zderiv, base, pos
    inc, dec = -30, 50
    pos = np.array([1000, 1000, 200])
    model = Sphere(pos[0], pos[1], pos[2], 1,
                   #{'magnetization':utils.ang2vec(100, 25, -10)})
                   {'magnetization':10000})
    struct_ind = 3
    shape = (128, 128)
    x, y, z = gridder.regular((0, 3000, 0, 3000), shape, z=-1)
    base = 10
    field = utils.nt2si(sphere.tf(x, y, z, [model], inc, dec)) + base
    xderiv = fourier.derivx(x, y, field, shape)
    yderiv = fourier.derivy(x, y, field, shape)
    zderiv = fourier.derivz(x, y, field, shape)
def test_euler_classic_sphere_mag():
"gravmag.euler.Classic for sphere model and magnetic data"
euler = Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind).fit()<|fim▁hole|> assert np.all((pos - euler.estimate_)/pos <= precision), \
'position: %s estimated: %s' % (str(pos), str(euler.estimate_))
def test_euler_classic_expandingwindow_sphere_mag():
    "gravmag.euler.ExpandingWindow w Classic for sphere model + magnetic data"
    # Solve with expanding windows centred on the true source position.
    euler = ExpandingWindow(
        Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind),
        center=[1000, 1000], sizes=np.linspace(100, 2000, 20)).fit()
    # Recovered base level and source position must match the synthetic
    # model to within the relative `precision` (1%).
    assert (base - euler.baselevel_)/base <= precision, \
        'baselevel: %g estimated: %g' % (base, euler.baselevel_)
    assert np.all((pos - euler.estimate_)/pos <= precision), \
        'position: %s estimated: %s' % (str(pos), str(euler.estimate_))
def test_euler_classic_movingwindow_sphere_mag():
    "gravmag.euler.MovingWindow w Classic for sphere model + magnetic data"
    # Solve on a 10x10 grid of 1000x1000 moving windows, keeping the
    # best 20% of the solutions.
    euler = MovingWindow(
        Classic(x, y, z, field, xderiv, yderiv, zderiv, struct_ind),
        windows=[10, 10], size=(1000, 1000), keep=0.2).fit()
    # Every kept solution must recover the base level and the source
    # position to within the relative `precision`.
    for b in euler.baselevel_:
        assert (base - b)/base <= precision, \
            'baselevel: %g estimated: %g' % (base, b)
    for c in euler.estimate_:
        assert np.all((pos - c)/pos <= precision), \
            'position: %s estimated: %s' % (str(pos), str(c))<|fim▁end|>
|
assert (base - euler.baselevel_)/base <= precision, \
'baselevel: %g estimated: %g' % (base, euler.baselevel_)
|
<|file_name|>migration_context.py<|end_file_name|><|fim▁begin|># Copyright 2015 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from oslo_utils import versionutils
from nova.db import api as db
from nova import exception
from nova.objects import base
from nova.objects import fields
@base.NovaObjectRegistry.register<|fim▁hole|> Some resources cannot be calculated from knowing the flavor alone for the
purpose of resources tracking, but need to be persisted at the time the
claim was made, for subsequent resource tracking runs to be consistent.
MigrationContext objects are created when the claim is done and are there
to facilitate resource tracking and final provisioning of the instance on
the destination host.
"""
# Version 1.0: Initial version
# Version 1.1: Add old/new pci_devices and pci_requests
VERSION = '1.1'
fields = {
'instance_uuid': fields.UUIDField(),
'migration_id': fields.IntegerField(),
'new_numa_topology': fields.ObjectField('InstanceNUMATopology',
nullable=True),
'old_numa_topology': fields.ObjectField('InstanceNUMATopology',
nullable=True),
'new_pci_devices': fields.ObjectField('PciDeviceList',
nullable=True),
'old_pci_devices': fields.ObjectField('PciDeviceList',
nullable=True),
'new_pci_requests': fields.ObjectField('InstancePCIRequests',
nullable=True),
'old_pci_requests': fields.ObjectField('InstancePCIRequests',
nullable=True),
}
@classmethod
def obj_make_compatible(cls, primitive, target_version):
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version < (1, 1):
primitive.pop('old_pci_devices', None)
primitive.pop('new_pci_devices', None)
primitive.pop('old_pci_requests', None)
primitive.pop('new_pci_requests', None)
@classmethod
def obj_from_db_obj(cls, db_obj):
primitive = jsonutils.loads(db_obj)
return cls.obj_from_primitive(primitive)
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_extra = db.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=['migration_context'])
if not db_extra:
raise exception.MigrationContextNotFound(
instance_uuid=instance_uuid)
if db_extra['migration_context'] is None:
return None
return cls.obj_from_db_obj(db_extra['migration_context'])<|fim▁end|>
|
class MigrationContext(base.NovaPersistentObject, base.NovaObject):
"""Data representing additional resources related to a migration.
|
<|file_name|>karma.config.js<|end_file_name|><|fim▁begin|>var webpackConfig = require('./webpack.local.config.js');
<|fim▁hole|>
// base path that will be used to resolve all patterns (eg. files, exclude)
basePath: '',
// frameworks to use
// available frameworks: https://npmjs.org/browse/keyword/karma-adapter
frameworks: ['mocha'],
// list of files / patterns to load in the browser
files: [
'../js/src/test_index.js'
],
// list of files to exclude
exclude: [],
// preprocess matching files before serving them to the browser
// available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor
preprocessors: {
'../js/src/test_index.js': ['webpack', 'sourcemap'],
},
// test results reporter to use
// possible values: 'dots', 'progress'
// available reporters: https://npmjs.org/browse/keyword/karma-reporter
reporters: ['progress'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_INFO,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
autoWatchBatchDelay: 300,
// start these browsers
// available browser launchers: https://npmjs.org/browse/keyword/karma-launcher
browsers: ['Chrome'],
// Continuous Integration mode
// if true, Karma captures browsers, runs the tests and exits
singleRun: false,
// Concurrency level
// how many browser should be started simultaneous
concurrency: Infinity,
// Webpack
webpack: webpackConfig,
webpackServer: {
noInfo: true
}
});
};<|fim▁end|>
|
webpackConfig.entry = {};
module.exports = function (config) {
config.set({
|
<|file_name|>browser_test.go<|end_file_name|><|fim▁begin|>package payload_test
import (
"encoding/json"
"fmt"
"testing"
"github.com/RobotsAndPencils/buford/payload"
)
// ExampleBrowser demonstrates marshalling a Safari push-notification payload
// that carries a "View" action button and url-args; the Output comment pins
// the exact JSON wire format.
func ExampleBrowser() {
	p := payload.Browser{
		Alert: payload.BrowserAlert{
			Title:  "Flight A998 Now Boarding",
			Body:   "Boarding has begun for Flight A998.",
			Action: "View",
		},
		URLArgs: []string{"boarding", "A998"},
	}
	b, err := json.Marshal(p)
	if err != nil {
		// handle error
	}
	fmt.Printf("%s", b)
	// Output: {"aps":{"alert":{"title":"Flight A998 Now Boarding","body":"Boarding has begun for Flight A998.","action":"View"},"url-args":["boarding","A998"]}}
}
// TestBrowser verifies that a fully populated Browser payload marshals to the
// expected Safari push JSON, including the action button and url-args.
func TestBrowser(t *testing.T) {
	alert := payload.BrowserAlert{
		Title:  "Flight A998 Now Boarding",
		Body:   "Boarding has begun for Flight A998.",
		Action: "View",
	}
	p := payload.Browser{Alert: alert, URLArgs: []string{"boarding", "A998"}}

	want := []byte(`{"aps":{"alert":{"title":"Flight A998 Now Boarding","body":"Boarding has begun for Flight A998.","action":"View"},"url-args":["boarding","A998"]}}`)
	testPayload(t, p, want)
}
func TestValidBrowser(t *testing.T) {
p := payload.Browser{
Alert: payload.BrowserAlert{
Title: "Flight A998 Now Boarding",
Body: "Boarding has begun for Flight A998.",
},
}<|fim▁hole|> if err := p.Validate(); err != nil {
t.Errorf("Expected no error, got %v.", err)
}
}
func TestInvalidBrowser(t *testing.T) {
tests := []*payload.Browser{
{
Alert: payload.BrowserAlert{Action: "View"},
},
{},
nil,
}
for _, p := range tests {
if err := p.Validate(); err != payload.ErrIncomplete {
t.Errorf("Expected err %v, got %v.", payload.ErrIncomplete, err)
}
}
}<|fim▁end|>
| |
<|file_name|>check_name_request.py<|end_file_name|><|fim▁begin|># coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CheckNameRequest(Model):
    """Request body for checking Power BI workspace-collection name
    availability.

    NOTE(review): the original span was corrupted by dataset FIM artifact
    tokens (the closing ``}`` of ``_attribute_map`` was displaced below
    ``__init__``); this reconstruction restores valid Python without changing
    the model's fields or defaults.

    :param name: Workspace collection name
    :type name: str
    :param type: Resource type. Default value:
     "Microsoft.PowerBI/workspaceCollections" .
    :type type: str
    """

    # msrest (de)serialization map: attribute name -> wire key and type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, name=None, type="Microsoft.PowerBI/workspaceCollections"):
        self.name = name
        self.type = type
<|file_name|>color-mode.js<|end_file_name|><|fim▁begin|>import React from 'react'
import Icon from 'react-icon-base'
<|fim▁hole|> <g><path d="m5 5v30h30v-30h-30z m2.5 27.5v-25h25l-25 25z"/></g>
</Icon>
)
export default GoColorMode<|fim▁end|>
|
const GoColorMode = props => (
<Icon viewBox="0 0 40 40" {...props}>
|
<|file_name|>match-in-macro.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(macro_rules, struct_variant)]
<|fim▁hole|> B { b1: int, bb1: int},
}
macro_rules! match_inside_expansion(
() => (
match B { b1:29 , bb1: 100} {
B { b1:b2 , bb1:bb2 } => b2+bb2
}
)
)
pub fn main() {
assert_eq!(match_inside_expansion!(),129);
}<|fim▁end|>
|
enum Foo {
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013, Mirantis Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the<|fim▁hole|>#
# @author: Tatiana Mazur
from django.conf.urls.defaults import patterns # noqa
from django.conf.urls.defaults import url # noqa
from openstack_dashboard.dashboards.project.vpn import views
urlpatterns = patterns('openstack_dashboard.dashboards.project.vpn.views',
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^addikepolicy$',
views.AddIKEPolicyView.as_view(), name='addikepolicy'),
url(r'^addipsecpolicy$',
views.AddIPSecPolicyView.as_view(), name='addipsecpolicy'),
url(r'^addipsecsiteconnection$',
views.AddIPSecSiteConnectionView.as_view(),
name='addipsecsiteconnection'),
url(r'^addvpnservice$',
views.AddVPNServiceView.as_view(), name='addvpnservice'),
url(r'^ikepolicy/(?P<ikepolicy_id>[^/]+)/$',
views.IKEPolicyDetailsView.as_view(), name='ikepolicydetails'),
url(r'^ipsecpolicy/(?P<ipsecpolicy_id>[^/]+)/$',
views.IPSecPolicyDetailsView.as_view(), name='ipsecpolicydetails'),
url(r'^vpnservice/(?P<vpnservice_id>[^/]+)/$',
views.VPNServiceDetailsView.as_view(), name='vpnservicedetails'),
url(r'^ipsecsiteconnection/(?P<ipsecsiteconnection_id>[^/]+)/$',
views.IPSecSiteConnectionDetailsView.as_view(),
name='ipsecsiteconnectiondetails'))<|fim▁end|>
|
# License for the specific language governing permissions and limitations
# under the License.
|
<|file_name|>blockly.js<|end_file_name|><|fim▁begin|>/*jshint esversion: 6 */
import Service from '@ember/service';
export default Service.extend({
  /**
   * Registers a Blockly block definition under `name` (overwriting any
   * previous definition) and, when `options.code` is given, installs a
   * matching code generator on `Blockly.MyLanguage`.
   *
   * @param {string} name - Block type identifier.
   * @param {Object} options - Blockly JSON block definition. May also carry
   *   `code`, a template in which `$NAME` placeholders are substituted with
   *   the block's input/field values (`$DO` expands to the nested statement
   *   body).
   * @param {Function} [callback_to_change_block] - Extra init logic invoked
   *   with the block instance as `this`.
   * @returns {Object} The registered block definition.
   */
  createCustomBlock(name, options, callback_to_change_block) {
    // Fall back to the default custom-block colour.
    options.colour = options.colour || '#4453ff';
    if (Blockly.Blocks[name]) {
      //console.warn(`Redefining block ${name}`);
    }
    Blockly.Blocks[name] = {
      init: function () {
        this.jsonInit(options);
        if (callback_to_change_block) {
          callback_to_change_block.call(this);
        }
      }
    };
    // Tag the definition so getCustomBlocksList() can distinguish it from
    // Blockly's built-in blocks.
    Blockly.Blocks[name].isCustomBlock = true;
    if (!Blockly.MyLanguage) {
      Blockly.MyLanguage = Blockly.JavaScript;
    }
    if (options.code) {
      Blockly.MyLanguage[name] = function (block) {
        // Collect every $PLACEHOLDER token present in the code template.
        let variables = options.code.match(/\$(\w+)/g);
        let code = options.code;
        if (variables) {
          variables.forEach((v) => {
            let regex = new RegExp('\\' + v, "g");
            let variable_name = v.slice(1);
            var variable_object = null;
            if (variable_name === "DO") {
              // $DO expands to the generated code of nested statements.
              variable_object = Blockly.JavaScript.statementToCode(block, variable_name);
            } else {
              // Otherwise use the attached value input, falling back to the
              // field value of the same name.
              variable_object = Blockly.MyLanguage.valueToCode(block, variable_name) || block.getFieldValue(variable_name) || null;
            }
            code = code.replace(regex, variable_object);
          });
        }
        return code;
      };
    }
    return Blockly.Blocks[name];
  },
createBlockWithAsyncDropdown(name, options) {
function callback_to_change_block() {
this.
appendDummyInput().
appendField(options.label || "").
appendField(new Blockly.FieldDropdown(options.callbackDropdown), 'DROPDOWN_VALUE');
}
return this.createCustomBlock(name, options, callback_to_change_block);
},
  /**
   * Convenience wrapper that builds a statement block (previous/next
   * connections) from a small options object, optionally prefixed with an
   * icon image.
   *
   * Option keys are Spanish domain terms used throughout the project
   * (`descripcion`, `icono`, `comportamiento`, `argumentos`) and are part of
   * the runtime contract — do not rename them.
   *
   * @param {string} name - Block type identifier.
   * @param {Object} options - `descripcion` (label), optional `colour`,
   *   `icono` (image filename under iconos/), `code` override, and
   *   `comportamiento`/`argumentos` used by the default generated code.
   * @returns {Object} The registered block definition.
   */
  createCustomBlockWithHelper(name, options) {
    let block_def = {
      message0: options.descripcion,
      colour: options.colour || '#4a6cd4',
      previousStatement: true,
      nextStatement: true,
      args0: [],
      // Default generator invokes the named behaviour on the current actor.
      code: options.code || `hacer(actor_id, "${options.comportamiento}", ${options.argumentos});`,
    };
    if (options.icono) {
      // Prepend the icon as arg %1 of the block's message.
      block_def.message0 = `%1 ${options.descripcion}`;
      block_def.args0.push({
        "type": "field_image",
        "src": `iconos/${options.icono}`,
        "width": 16,
        "height": 16,
        "alt": "*"
      });
    }
    return this.createCustomBlock(name, block_def);
  },
  /**
   * Registers an output ("value") block that always evaluates to the string
   * constant `options.valor`, rendered as an icon plus description.
   *
   * @param {string} name - Block type identifier.
   * @param {Object} options - `descripcion`, `icono`, `valor`, optional
   *   `colour`.
   * @returns {Object} The registered block definition.
   */
  createBlockValue(name, options) {
    let block = this.createCustomBlock(name, {
      message0: `%1 ${options.descripcion}`,
      colour: options.colour || '#4a6cd4',
      output: 'String',
      args0: [
        {
          "type": "field_image",
          "src": `iconos/${options.icono}`,
          "width": 16,
          "height": 16,
          "alt": "*"
        }
      ],
    });
    // Generator yields the constant as a quoted string literal with atomic
    // precedence.
    Blockly.MyLanguage[name] = function () {
      return [`'${options.valor}'`, Blockly.JavaScript.ORDER_ATOMIC];
    };
    return block;
  },
getBlocksList() {
return Object.keys(Blockly.Blocks);
},<|fim▁hole|> return Blockly.Blocks[e].isCustomBlock;
}
);
},
  /**
   * Registers `new_name` as an alias of an existing block: the definition is
   * shallow-copied and the original's code generator is reused verbatim.
   * The alias relationship is recorded on both definitions via `aliases`.
   *
   * @param {string} new_name - Alias block type identifier.
   * @param {string} original_block_name - Existing block to alias.
   * @returns {Object} The newly registered alias definition.
   */
  createAlias(new_name, original_block_name) {
    let original_block = Blockly.Blocks[original_block_name];
    // Shallow copy: the alias shares nested objects with the original.
    Blockly.Blocks[new_name] = Object.assign({}, original_block);
    let new_block = Blockly.Blocks[new_name];
    new_block.isCustomBlock = true;
    // Track the relation in both directions.
    new_block.aliases = [original_block_name];
    if (!original_block.aliases)
      original_block.aliases = [];
    original_block.aliases.push(new_name);
    if (!Blockly.MyLanguage) {
      Blockly.MyLanguage = Blockly.JavaScript;
    }
    // The alias reuses the original generator unchanged.
    Blockly.MyLanguage[new_name] = Blockly.JavaScript[original_block_name];
    return Blockly.Blocks[new_name];
  },
setStartHat(state) {
Blockly.BlockSvg.START_HAT = state;
}
});<|fim▁end|>
|
getCustomBlocksList() {
return Object.keys(Blockly.Blocks).filter((e) => {
|
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""proyectoP4 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views<|fim▁hole|>Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url, patterns
from django.contrib import admin
from Workinout import views
from django.conf import settings
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^Workinout/', include('Workinout.urls')), # ADD THIS NEW TUPLE!media/(?P<path>.*)
]
if settings.DEBUG:
urlpatterns += patterns(
'django.views.static',
(r'media/(?P<path>.*)',
'serve',
{'document_root': settings.MEDIA_ROOT}), )
else:
urlpatterns += patterns('', url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_PATH}),
)<|fim▁end|>
|
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
|
<|file_name|>read_pixels.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 -i
import serial # if you have not already done so
from time import sleep
import matplotlib.pyplot as plt
import re
import datetime
import numpy
import pickle
class DataExtruder:
<|fim▁hole|> def __init__(self,port='/dev/ttyACM0',baudrate=115200):
self.pattern_pixels=re.compile(r'data=(?P<pixels>[\w ]*) \((?P<nerror>\d*) errors')
self.port=port
self.baudrate=baudrate
self.ser = None
self.data={
'pixels':[],
'nerror':[]
}
self.figure=plt.figure(figsize=[20,8])
self.figure.show()
self.figure_axe=self.figure.gca()
    def acquire(self, plot=True):
        """Read pixel frames from the serial port until Ctrl+C is pressed.

        Each matching line is decoded as a run of 2-hex-digit pixel values;
        pixels are inverted (255 - value) on decode, and a pair of spaces
        encodes a read error, stored as -1. Decoded frames and their error
        counts are appended to ``self.data``. The serial port is closed on
        exit.

        :param plot: when True, refresh the live plot after each frame
        """
        if self.ser is None:
            self.ser=serial.Serial(self.port, self.baudrate)
        else:
            print('serial connection alredy opened')
        print('starting acquisition, press Ctrl+C to stop.')
        try:
            while True:
                data_serial=self.ser.readline().decode('utf-8')
                m=self.pattern_pixels.match(data_serial)
                if m:
                    pixels_num=[];
                    pixels_ascii=m.group('pixels');
                    i=0
                    npixel=0
                    # Walk the hex string two characters at a time.
                    while i+1<len(pixels_ascii):
                        if pixels_ascii[i]==' ':
                            if pixels_ascii[i+1]==' ':
                                # "  " marks a pixel the firmware failed to read.
                                pixels_num.append(-1)
                                i=i+2
                            else:
                                # Lone space: malformed frame, abort this line.
                                print('ERROR reading pixel')
                                break
                        else:
                            # Invert so brighter pixels get larger values.
                            pixel=255-int(pixels_ascii[i:i+2],16)
                            pixels_num.append(pixel)
                            i=i+2
                            npixel=npixel+1
                    self.data['pixels'].append(pixels_num)
                    self.data['nerror'].append(int(m.group('nerror')))
                    if plot:
                        self.plot_pixels()
                sleep(0.05)
        except KeyboardInterrupt:
            # Ctrl+C is the intended way to stop acquisition.
            pass
        self.ser.close()
        self.ser=None
    def plot_pixels(self):
        """Redraw the live plot with the most recent complete frame.

        Returns silently when no data has been captured yet or when the last
        reading is not a full 3648-pixel frame.
        """
        plt.cla()
        self.figure_axe.set_position([0.05,0.1,0.94,0.8])
        if len(self.data['pixels'])==0:
            return
        last_reading=self.data['pixels'][len(self.data['pixels'])-1]
        # Only plot complete frames; partial reads are skipped.
        if len(last_reading)!=3648:
            return
        x=range(1,3649)
        self.plt_pixels,=plt.plot(x,last_reading,'b-')
        # -1 marks read errors, hence the lower y limit.
        self.figure_axe.set_ylim([-1,255])
        self.figure_axe.set_xlim([1,3648])
        self.figure_axe.set_ylabel('pixel value')
        self.figure_axe.set_xlabel('pixel')
        plt.draw()
if __name__ == '__main__':
test=DataExtruder(port='/dev/ttyACM0',baudrate=115200)
test.acquire()<|fim▁end|>
| |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ast
import re
import mock
import novaclient.exceptions as nova_ex
import six
from sahara.conductor import resource as r
from sahara.plugins.vanilla import plugin
import sahara.service.validation as v
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
m = {}
_types_checks = {
"string": [1, (), {}, True],
"integer": ["a", (), {}, True],
"uuid": ["z550e8400-e29b-41d4-a716-446655440000", 1, "a", (), {}, True],
"array": [{}, 'a', 1, True],
"boolean": [1, 'a', (), {}]
}
def _update_data(data, update):
data.update(update)
return data
def _get_plugins():
vanilla = plugin.VanillaProvider
vanilla.name = 'vanilla'
return [vanilla]
def _get_plugin(name):
if name == 'vanilla':
vanilla = plugin.VanillaProvider
vanilla.name = 'vanilla'
return vanilla
return None
def _get_keypair(name):
if name != "test_keypair":
raise nova_ex.NotFound("")
def _get_network(**kwargs):
if 'id' in kwargs and (
kwargs['id'] != "d9a3bebc-f788-4b81-9a93-aa048022c1ca"):
raise nova_ex.NotFound("")
return 'OK'
def _get_fl_ip_pool_list():
return [FakeNetwork("d9a3bebc-f788-4b81-9a93-aa048022c1ca")]
def _get_availability_zone_list(detailed=True):
return [FakeAvailabilityZone('nova')]
def _get_heat_stack_list(**kwargs):
if (kwargs.get('filters') and
kwargs.get('filters').get('name') == 'test-heat'):
return [FakeStack('test-heat')]
return []
class FakeStack(object):
def __init__(self, name):
self.stack_name = name
class FakeNetwork(object):
def __init__(self, name):
self.name = name
class FakeAvailabilityZone(object):
def __init__(self, name):
self.zoneName = name
class FakeFlavor(object):
def __init__(self, id):
self.id = id
class FakeSecurityGroup(object):
    """Minimal stand-in for a nova security group (id and name only)."""

    def __init__(self, id, name):
        self.id, self.name = id, name
def _get_flavors_list():
return [FakeFlavor("42")]
def _get_security_groups_list():
return [FakeSecurityGroup("1", "default"),
FakeSecurityGroup("2", "group1"),
FakeSecurityGroup("3", "group2")]
def start_patch(patch_templates=True):
get_clusters_p = mock.patch("sahara.service.api.get_clusters")
get_cluster_p = mock.patch("sahara.service.api.get_cluster")
if patch_templates:
get_ng_templates_p = mock.patch(
"sahara.service.api.get_node_group_templates")
get_ng_template_p = mock.patch(
"sahara.service.api.get_node_group_template")
if patch_templates:
get_cl_templates_p = mock.patch(
"sahara.service.api.get_cluster_templates")
get_cl_template_p = mock.patch(
"sahara.service.api.get_cluster_template")
nova_p = mock.patch("sahara.utils.openstack.nova.client")
heat_p = mock.patch("sahara.utils.openstack.heat.client")
cinder_p = mock.patch("sahara.utils.openstack.cinder.client")
cinder_exists_p = mock.patch(
"sahara.utils.openstack.cinder.check_cinder_exists")
get_image_p = mock.patch("sahara.service.api.get_image")
get_image = get_image_p.start()
get_clusters = get_clusters_p.start()
get_cluster = get_cluster_p.start()
if patch_templates:
get_ng_templates = get_ng_templates_p.start()
get_ng_template = get_ng_template_p.start()
if patch_templates:
get_cl_templates = get_cl_templates_p.start()
get_cl_template_p.start()
nova = nova_p.start()
if patch_templates:
get_cl_templates.return_value = []
nova().flavors.list.side_effect = _get_flavors_list
nova().security_groups.list.side_effect = _get_security_groups_list
nova().keypairs.get.side_effect = _get_keypair
nova().networks.find.side_effect = _get_network
nova().networks.find.__name__ = 'find'
nova().floating_ip_pools.list.side_effect = _get_fl_ip_pool_list
nova().availability_zones.list.side_effect = _get_availability_zone_list
heat = heat_p.start()
heat().stacks.list.side_effect = _get_heat_stack_list
cinder = cinder_p.start()
cinder().availability_zones.list.side_effect = _get_availability_zone_list
cinder_exists = cinder_exists_p.start()
cinder_exists.return_value = True
class Image(object):
def __init__(self, name='test'):
self.name = name
@property
def id(self):
if self.name == 'test':
return '550e8400-e29b-41d4-a716-446655440000'
else:
return '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
@property
def tags(self):
if self.name == 'test':
return ['vanilla', '1.2.1']
else:
return ['vanilla', 'wrong_tag']
def _get_image(id):
if id == '550e8400-e29b-41d4-a716-446655440000':
return Image()
else:
return Image('wrong_test')
get_image.side_effect = _get_image
nova().images.list_registered.return_value = [Image(),
Image(name='wrong_name')]
ng_dict = tu.make_ng_dict('ng', '42', ['namenode'], 1)
cluster = tu.create_cluster('test', 't', 'vanilla', '1.2.1', [ng_dict],
id=1, status='Active')
# stub clusters list
get_clusters.return_value = [cluster]
get_cluster.return_value = cluster
# stub node templates
if patch_templates:
ngt_dict = {'name': 'test', 'tenant_id': 't', 'flavor_id': '42',
'plugin_name': 'vanilla', 'hadoop_version': '1.2.1',
'id': '550e8400-e29b-41d4-a716-446655440000',
'node_processes': ['namenode']}
get_ng_templates.return_value = [r.NodeGroupTemplateResource(ngt_dict)]
ct_dict = {'name': 'test', 'tenant_id': 't',
'plugin_name': 'vanilla', 'hadoop_version': '1.2.1'}
get_cl_templates.return_value = [r.ClusterTemplateResource(ct_dict)]
def _get_ng_template(id):
for template in get_ng_templates():
if template.id == id:
return template
return None
if patch_templates:
get_ng_template.side_effect = _get_ng_template
# request data to validate
patchers = [get_clusters_p, get_cluster_p,
nova_p, get_image_p, heat_p, cinder_p,
cinder_exists_p]
if patch_templates:
patchers.extend([get_ng_template_p, get_ng_templates_p,
get_cl_template_p, get_cl_templates_p])
return patchers
def stop_patch(patchers):
    """Deactivate every patcher, newest first (reverse of start order)."""
    for patcher in patchers[::-1]:
        patcher.stop()
class ValidationTestCase(base.SaharaTestCase):
def setUp(self):
super(ValidationTestCase, self).setUp()
self._create_object_fun = None
self.scheme = None
def tearDown(self):
self._create_object_fun = None
super(ValidationTestCase, self).tearDown()
def _assert_calls(self, mock, call_info):
if not call_info:
self.assertEqual(0, mock.call_count, "Unexpected call to %s: %s"
% (mock.name, str(mock.call_args)))
else:
self.assertEqual(call_info[0], mock.call_count)
self.assertEqual(call_info[1], mock.call_args[0][0].code)
possible_messages = ([call_info[2]] if isinstance(
call_info[2], six.string_types) else call_info[2])
match = False
check = mock.call_args[0][0].message
if check.find('Error ID:') != -1:
check = check.split('\n')[0]
for message in possible_messages:
if self._check_match(message, check):
match = True
break
if not match:
self.assertIn(check, possible_messages)
def _check_match(self, expected, actual):
d1, r1 = self._extract_printed_dict(expected)
d2, r2 = self._extract_printed_dict(actual)
# Note(slukjanov): regex needed because of different
# versions of jsonschema generate different<|fim▁hole|> # messages.
return (r1 == r2 or re.match(r1, r2)) and (d1 == d2)
def _extract_printed_dict(self, s):
start = s.find('{')
if start == -1:
return None, s
end = s.rfind('}')
if end == -1:
return None, s
return ast.literal_eval(s[start:end+1]), s[0:start+1] + s[end]
@mock.patch("sahara.utils.api.request_data")
@mock.patch("sahara.utils.api.bad_request")
def _assert_create_object_validation(
self, bad_req=None, request_data=None,
data=None, bad_req_i=None):
request_data.return_value = data
# mock function that should be validated
patchers = start_patch()
m_func = mock.Mock()
m_func.__name__ = "m_func"
v.validate(self.scheme, self._create_object_fun)(m_func)(data=data)
self.assertEqual(1, request_data.call_count)
self._assert_calls(bad_req, bad_req_i)
stop_patch(patchers)
def _assert_valid_name_hostname_validation(self, data):
data.update({'name': None})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"None is not of type 'string'")
)
data.update({'name': ""})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'' is too short")
)
data.update({'name': ('a' * 51)})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'%s' is too long" % ('a' * 51))
)
data.update({'name': 'a-!'})
self._assert_create_object_validation(
data=data,
bad_req_i=(1, "VALIDATION_ERROR",
u"'a-!' is not a 'valid_name_hostname'")
)
def _prop_types_str(self, prop_types):
return ", ".join(["'%s'" % prop for prop in prop_types])
def _assert_types(self, default_data):
for p_name in self.scheme['properties']:
prop = self.scheme['properties'][p_name]
prop_types = prop["type"]
if type(prop_types) is not list:
prop_types = [prop_types]
for prop_type in prop_types:
if prop_type in _types_checks:
for type_ex in _types_checks[prop_type]:
data = default_data.copy()
value = type_ex
value_str = str(value)
if isinstance(value, str):
value_str = "'%s'" % value_str
data.update({p_name: value})
message = ("%s is not of type %s" %
(value_str,
self._prop_types_str(prop_types)))
if "enum" in prop:
message = [message, "%s is not one of %s" %
(value_str, prop["enum"])]
self._assert_create_object_validation(
data=data,
bad_req_i=(1, 'VALIDATION_ERROR', message)
)
def _assert_cluster_configs_validation(self, require_image_id=False):
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'cluster_configs': {
'HDFS': {
u'hadoop.tmp.dir': '/temp/'
}
},
'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
}
if require_image_id:
data_without_image = data.copy()
data_without_image.pop('default_image_id')
self._assert_create_object_validation(
data=data_without_image,
bad_req_i=(1, 'NOT_FOUND',
"'default_image_id' field is not found")
)
self._assert_create_object_validation(
data=_update_data(data.copy(), {
'cluster_configs': {
'wrong_target': {
u'hadoop.tmp.dir': '/temp/'
}
}}),
bad_req_i=(1, 'INVALID_REFERENCE',
"Plugin doesn't contain applicable "
"target 'wrong_target'")
)
self._assert_create_object_validation(
data=_update_data(data.copy(), {
'cluster_configs': {
'HDFS': {
u's': '/temp/'
}
}
}),
bad_req_i=(1, 'INVALID_REFERENCE',
"Plugin's applicable target 'HDFS' doesn't "
"contain config with name 's'")
)
def _assert_cluster_default_image_tags_validation(self):
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
}
self._assert_create_object_validation(data=data)
data = {
'name': 'test-cluster',
'plugin_name': 'vanilla',
'hadoop_version': '1.2.1',
'default_image_id': '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
}
self._assert_create_object_validation(
data=data,
bad_req_i=(1, 'INVALID_REFERENCE',
"Requested image "
"'813fe450-40d2-4acc-ade5-ea753a1bd5bc' "
"doesn't contain required tags: "
"['1.2.1']"))
def assert_protected_resource_exception(self, ex):
self.assertIn("marked as protected", six.text_type(ex))
def assert_created_in_another_tenant_exception(self, ex):
self.assertIn("wasn't created in this tenant", six.text_type(ex))<|fim▁end|>
| |
<|file_name|>rt-set-exit-status-fail.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or<|fim▁hole|>// option. This file may not be copied, modified, or distributed
// except according to those terms.
// error-pattern:whatever
use std::os;
fn main() {
error2!("whatever");
// Setting the exit status only works when the scheduler terminates
// normally. In this case we're going to fail, so instead of of
// returning 50 the process will return the typical rt failure code.
os::set_exit_status(50);
fail2!();
}<|fim▁end|>
|
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|>from rest_framework import relations, serializers
import amo
import mkt.carriers
import mkt.regions
from addons.models import Category
from mkt.api.fields import SplitField, TranslationSerializerField
from mkt.api.serializers import URLSerializerMixin
from mkt.collections.serializers import (CollectionSerializer, SlugChoiceField,
SlugModelChoiceField)
from mkt.submit.serializers import PreviewSerializer
from mkt.webapps.api import AppSerializer
from .models import FeedApp, FeedItem
class FeedAppSerializer(URLSerializerMixin, serializers.ModelSerializer):
app = SplitField(relations.PrimaryKeyRelatedField(required=True),
AppSerializer())
description = TranslationSerializerField(required=False)
preview = SplitField(relations.PrimaryKeyRelatedField(required=False),
PreviewSerializer())
pullquote_attribution = TranslationSerializerField(required=False)
pullquote_rating = serializers.IntegerField(required=False)
pullquote_text = TranslationSerializerField(required=False)
class Meta:
fields = ('app', 'description', 'id', 'preview',
'pullquote_attribution', 'pullquote_rating', 'pullquote_text',
'url')
model = FeedApp<|fim▁hole|>
class FeedItemSerializer(URLSerializerMixin, serializers.ModelSerializer):
carrier = SlugChoiceField(required=False,
choices_dict=mkt.carriers.CARRIER_MAP)
region = SlugChoiceField(required=False,
choices_dict=mkt.regions.REGION_LOOKUP)
category = SlugModelChoiceField(required=False,
queryset=Category.objects.filter(type=amo.ADDON_WEBAPP))
item_type = serializers.SerializerMethodField('get_item_type')
# Types of objects that are allowed to be a feed item.
collection = SplitField(relations.PrimaryKeyRelatedField(required=False),
CollectionSerializer())
class Meta:
fields = ('carrier', 'category', 'collection', 'id', 'item_type',
'region', 'url')
item_types = ('collection',)
model = FeedItem
url_basename = 'feeditem'
    def validate(self, attrs):
        """
        Ensure that when any feed-item object field is supplied, exactly one
        of the types in ``Meta.item_types`` ends up defined.

        :param attrs: deserialized field values
        :returns: ``attrs`` unchanged when valid
        :raises serializers.ValidationError: if zero or multiple item-type
            fields are set while at least one was present in the input
        """
        # True when the request touched any of the item-type fields at all.
        item_changed = any(k for k in self.Meta.item_types if k in attrs.keys())
        # Count how many of those fields carry a truthy value.
        num_defined = sum(1 for item in self.Meta.item_types if attrs.get(item))
        if item_changed and num_defined != 1:
            message = ('A valid value for exactly one of the following '
                       'parameters must be defined: %s' % ','.join(
                           self.Meta.item_types))
            raise serializers.ValidationError(message)
        return attrs
def get_item_type(self, obj):
for item_type in self.Meta.item_types:
if getattr(obj, item_type):
return item_type
return<|fim▁end|>
|
url_basename = 'feedapp'
|
<|file_name|>app-build-details.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, NgZone, Inject, OnDestroy } from '@angular/core';
import { DOCUMENT } from '@angular/common';
import { Title } from '@angular/platform-browser';
import { ActivatedRoute, Router } from '@angular/router';
import { ApiService } from '../../services/api.service';
import { TimeService } from '../../services/time.service';
import { AuthService } from '../../services/auth.service';
import { SocketService } from '../../services/socket.service';
import { distanceInWordsToNow, distanceInWordsStrict, format } from 'date-fns';
import { Subscription } from 'rxjs';
import { filter } from 'rxjs/operators';
@Component({
selector: 'app-build-details',
templateUrl: 'app-build-details.component.html'
})
export class AppBuildDetailsComponent implements OnInit, OnDestroy {
loading: boolean;
id: string;
build: any;
status: string;
timeWords: string;
maxCompletedJobTime: number;
minRunningJobStartTime: number;
previousRuntime: number;
processingBuild: boolean;
tag: string = null;
updateInterval: any;
subStatus: Subscription;
sub: Subscription;
subUpdate: Subscription;
userData: any;
userId: string | null;
committerAvatar: string;
authorAvatar: string;
nameAuthor: string;
nameCommitter: string;
timerSubscription: any = null;
currentTime: number;
commitMessage: string;
dateTime: string;
dateTimeToNow: string;
  /**
   * Injects routing/data services and seeds the view with its initial
   * "queued" state; actual build data is loaded later in ngOnInit().
   */
  constructor(
    private socketService: SocketService,
    private apiService: ApiService,
    private timeService: TimeService,
    private authService: AuthService,
    private route: ActivatedRoute,
    private ngZone: NgZone,
    private router: Router,
    @Inject(DOCUMENT) private document: any,
    private titleService: Title
  ) {
    // Show the loading indicator until the first API response arrives.
    this.loading = true;
    this.status = 'queued';
    this.currentTime = new Date().getTime();
  }
ngOnInit() {
this.userData = this.authService.getData();
this.route.params.subscribe(params => {
this.id = params.id;
this.userId = this.userData && this.userData.id || null;
this.apiService.getBuild(this.id, this.userId).subscribe(build => {
this.loading = false;
this.build = build;
if (this.build.data && this.build.data.ref && this.build.data.ref.startsWith('refs/tags')) {
this.tag = this.build.data.ref.replace('refs/tags/', '');
}
this.setData();
this.build.jobs.forEach(job => job.time = '00:00');
this.timeWords = distanceInWordsToNow(this.build.created_at);
this.previousRuntime = 0;
if (this.build.lastBuild) {
let maxJobTime = Math.max(...this.build.lastBuild.job_runs.map(job => job.end_time - job.start_time));
maxJobTime ? this.previousRuntime = maxJobTime : this.previousRuntime = 0;
}
this.status = this.getBuildStatus();<|fim▁hole|> .subscribe(event => {
let index = this.build.jobs.findIndex(job => job.id === event.job_id);
if (index !== -1) {
if (event.data === 'job started') {
this.build.jobs[index].status = 'running';
this.build.jobs[index].end_time = null;
this.build.jobs[index].start_time = event.additionalData;
this.build.jobs[index].runs.push({ start_time: event.additionalData, end_time: null });
} else if (event.data === 'job succeded') {
this.build.jobs[index].status = 'success';
this.build.jobs[index].end_time = event.additionalData;
this.build.jobs[index].runs[this.build.jobs[index].runs.length - 1].end_time = event.additionalData;
} else if (event.data === 'job failed') {
this.build.jobs[index].status = 'failed';
if (!this.build.jobs[index].end_time) {
this.build.jobs[index].end_time = event.additionalData;
}
if (!this.build.jobs[index].runs[this.build.jobs[index].runs.length - 1].end_time) {
this.build.jobs[index].runs[this.build.jobs[index].runs.length - 1].end_time = event.additionalData;
}
} else if (event.data === 'job stopped') {
if (this.build.jobs[index].status !== 'success') {
this.build.jobs[index].status = 'failed';
}
if (!this.build.jobs[index].end_time) {
this.build.jobs[index].end_time = event.additionalData;
}
if (!this.build.jobs[index].runs[this.build.jobs[index].runs.length - 1].end_time) {
this.build.jobs[index].runs[this.build.jobs[index].runs.length - 1].end_time = event.additionalData;
}
} else if (event.data === 'job queued') {
this.build.jobs[index].status = 'queued';
}
this.build.jobs[index].processing = false;
this.status = this.getBuildStatus();
this.updateJobTimes();
}
});
this.sub = this.socketService.outputEvents
.pipe(filter(event => event.type === 'build stopped' || event.type === 'build restarted'))
.subscribe(event => {
this.processingBuild = false;
});
this.subUpdate = this.socketService.outputEvents
.pipe(filter(event => event.data === 'build restarted' || event.data === 'build succeeded' || event.data === 'build failed'))
.subscribe(event => {
if (event.build_id === Number(this.id)) {
if (event.data === 'build restarted') {
this.build.start_time = event.additionalData;
this.processingBuild = false;
} else {
this.build.end_time = event.additionalData;
}
}
});
});
});
}
ngOnDestroy() {
if (this.sub) {
this.sub.unsubscribe();
}
if (this.subUpdate) {
this.subUpdate.unsubscribe();
}
if (this.subStatus) {
this.subStatus.unsubscribe();
}
if (this.timerSubscription) {
this.timerSubscription.unsubscribe();
}
if (this.document.getElementById('favicon')) {
this.document.getElementById('favicon').setAttribute('href', 'assets/images/favicon.png');
}
this.titleService.setTitle('Abstruse CI');
}
updateJobTimes(): void {
this.maxCompletedJobTime = Math.max(...this.build.jobs.map(job => job.end_time - job.start_time));
if (this.status === 'running') {
this.minRunningJobStartTime = Math.min(...this.build.jobs
.filter(job => job.status === 'running').map(job => job.start_time));
}
this.build.jobs = this.build.jobs.map(job => {
const lastRun = job.runs && job.runs[job.runs.length - 1].end_time ?
job.runs[job.runs.length - 1] : job.runs[job.runs.length - 2];
if (lastRun) {
job.lastRunTime = lastRun.end_time - lastRun.start_time;
}
return job;
});
}
getBuildStatus(): string {
let status = 'queued';
let favicon = 'assets/images/favicon-queued.png';
if (this.build && this.build.jobs) {
if (this.build.jobs.findIndex(job => job.status === 'failed') !== -1) {
status = 'failed';
favicon = 'assets/images/favicon-error.png';
}
if (this.build.jobs.findIndex(job => job.status === 'running') !== -1) {
status = 'running';
favicon = 'assets/images/favicon-running.png';
}
if (this.build.jobs.length === this.build.jobs.filter(j => j.status === 'success').length) {
status = 'success';
favicon = 'assets/images/favicon-success.png';
}
}
const name = this.build.repository.full_name;
if (this.document.getElementById('favicon')) {
this.document.getElementById('favicon').setAttribute('href', favicon);
}
this.titleService.setTitle(`${name} - ${status}`);
return status;
}
restartJob(e: MouseEvent, jobId: number): void {
e.preventDefault();
e.stopPropagation();
const index = this.build.jobs.findIndex(job => job.id === jobId);
this.build.jobs[index].processing = true;
this.socketService.emit({ type: 'restartJob', data: { jobId: jobId } });
}
stopJob(e: MouseEvent, jobId: number): void {
e.preventDefault();
e.stopPropagation();
const index = this.build.jobs.findIndex(job => job.id === jobId);
this.build.jobs[index].processing = true;
this.socketService.emit({ type: 'stopJob', data: { jobId: jobId } });
}
restartBuild(e: MouseEvent, id: number): void {
e.preventDefault();
e.stopPropagation();
this.previousRuntime = 0;
let maxJobTime = Math.max(...this.build.jobs.map(job => job.end_time - job.start_time));
maxJobTime ? this.previousRuntime = maxJobTime : this.previousRuntime = 0;
this.processingBuild = true;
this.socketService.emit({ type: 'restartBuild', data: { buildId: id } });
}
stopBuild(e: MouseEvent, id: number): void {
e.preventDefault();
e.stopPropagation();
this.processingBuild = true;
this.socketService.emit({ type: 'stopBuild', data: { buildId: id } });
}
gotoJob(e: MouseEvent, jobId: number): void {
e.preventDefault();
e.stopPropagation();
this.router.navigate(['job', jobId]);
}
setData(): void {
const data = this.build.data;
this.dateTime = data.pull_request && data.pull_request.updated_at ||
data.commit && data.commit.author && data.commit.author.date ||
data.commits && data.commits[data.commits.length - 1] && data.commits[data.commits.length - 1].timestamp ||
data.head_commit && data.head_commit.timestamp ||
null;
if (this.build.repository.repository_provider === 'github') {
if (this.build.data.commit) {
this.commitMessage = this.build.data.commit.message;
} else if (this.build.data.commits && this.build.data.commits.length > 0) {
const len = this.build.data.commits.length - 1;
this.commitMessage = this.build.data.commits[len].message;
} else if (this.build.data.pull_request && this.build.data.pull_request.title) {
this.commitMessage = this.build.data.pull_request.title;
} else if (this.build.data.head_commit) {
this.commitMessage = this.build.data.head_commit.message;
}
if (this.build.data.sha) {
const buildData = this.build.data;
this.committerAvatar = buildData.committer.avatar_url;
this.nameCommitter = buildData.commit.committer.name;
this.authorAvatar = buildData.author.avatar_url;
this.nameAuthor = buildData.commit.author.name;
} else if (this.build.data.head_commit) {
const commit = this.build.data.head_commit;
this.committerAvatar = this.build.data.sender.avatar_url;
this.nameAuthor = this.build.data.head_commit.author.name;
this.nameCommitter = this.build.data.head_commit.committer.name;
if (commit.author.username !== commit.committer.username) {
this.nameCommitter = commit.committer.name;
this.apiService.getGithubUserData(commit.author.username).subscribe((evt: any) => {
if (evt.status === 200) {
const body = JSON.parse(evt._body);
this.authorAvatar = body.avatar_url;
}
});
} else {
this.authorAvatar = this.committerAvatar;
this.nameCommitter = this.nameAuthor;
}
} else if (this.build.data.pull_request) {
this.authorAvatar = this.build.data.sender.avatar_url;
this.committerAvatar = this.authorAvatar;
this.apiService.getGithubUserData(this.build.data.sender.login).subscribe((evt: any) => {
if (evt.status === 200) {
const body = JSON.parse(evt._body);
this.nameAuthor = body.name;
}
});
this.apiService.getGithubUserData(this.build.data.pull_request.user.login).subscribe((evt: any) => {
if (evt.status === 200) {
const body = JSON.parse(evt._body);
this.nameCommitter = body.name;
}
});
}
} else if (this.build.repository.repository_provider === 'bitbucket') {
// bitbucket
if (this.build.data.actor) {
this.authorAvatar = this.build.data.actor.links.avatar.href;
this.nameAuthor = this.build.data.actor.display_name;
}
if (this.build.data.push) {
this.commitMessage = this.build.data.push.changes[0].commits[0].message;
this.dateTime = this.build.data.push.changes[0].commits[0].date;
this.committerAvatar = this.build.data.push.changes[0].commits[0].author.user.links.avatar.href;
this.nameCommitter = this.build.data.push.changes[0].commits[0].author.user.display_name;
} else if (this.build.data.pullrequest) {
this.commitMessage = data.pullrequest.description;
this.dateTime = data.pullrequest.updated_on;
this.committerAvatar = data.pullrequest.author.links.avatar.href;
this.nameAuthor = data.pullrequest.author.display_name;
this.nameCommitter = this.nameAuthor;
}
} else if (this.build.repository.repository_provider === 'gitlab') {
// gitlab
if (data.commit) {
this.dateTime = data.commit.created_at;
this.commitMessage = data.commit.message;
this.nameCommitter = data.commit.committer_name;
this.nameAuthor = data.commit.author_name;
this.apiService.customGet(this.build.repository.api_url + '/users', {
username: this.build.repository.user_login
}).subscribe(userData => {
this.authorAvatar = userData[0].avatar_url;
});
} else if (data.user_avatar) {
this.authorAvatar = data.user_avatar;
this.commitMessage = data.commits[0].message;
this.dateTime = data.commits[0].timestamp;
this.committerAvatar = this.authorAvatar;
this.nameAuthor = data.user_name;
this.nameCommitter = data.commits[0].author.name;
} else if (data.object_attributes) {
this.authorAvatar = data.user.avatar_url;
this.commitMessage = data.object_attributes.last_commit.message;
this.dateTime = data.object_attributes.last_commit.timestamp;
this.committerAvatar = this.authorAvatar;
this.nameAuthor = data.user.name;
this.nameCommitter = data.object_attributes.last_commit.author.name;
}
} else if (this.build.repository.repository_provider === 'gogs') {
// gogs
if (data.pusher) {
this.authorAvatar = data.pusher.avatar_url;
this.nameAuthor = data.pusher.username;
}
if (data.sender) {
this.commitMessage = data.commits[0].message;
this.dateTime = data.commits[0].timestamp;
this.committerAvatar = data.sender.avatar_url;
this.nameCommitter = data.sender.username;
} else if (data.pull_request) {
this.authorAvatar = data.pull_request.user.avatar_url;
this.nameAuthor = data.pull_request.user.username;
this.commitMessage = data.pull_request.title;
this.dateTime = data.pull_request.head_repo.updated_at;
}
}
this.timerSubscription = this.timeService.getCurrentTime().subscribe(time => {
this.currentTime = time;
this.dateTimeToNow = distanceInWordsToNow(this.dateTime);
});
}
}<|fim▁end|>
|
this.updateJobTimes();
this.subStatus = this.socketService.outputEvents
.pipe(filter(event => event.type === 'process'))
|
<|file_name|>trl.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf8 -*-
from report_aeroo.ctt_objects import ctt_currency
class trl(ctt_currency):<|fim▁hole|> self.cur_singular = u' Lira'
# default plural form for currency
self.cur_plural = u' Lira'
self.frc_singular = u' kuruş'
# default plural form for fractions
self.frc_plural = u' kuruş'
# grammatical genders: f - feminine, m - masculine, n -neuter
self.cur_gram_gender = 'm'
self.frc_gram_gender = 'm'
trl()<|fim▁end|>
|
def _init_currency(self):
self.language = u'tr_TR'
self.code = u'TRL'
self.fractions = 100
|
<|file_name|>grabsudoku.py<|end_file_name|><|fim▁begin|>from bs4 import BeautifulSoup
import requests, re
n = int(input('How many sudoku\'s do you want to download (between 1 and 10)? '))
if n < 1 or n > 10:
die()
url = 'http://show.websudoku.com/?level=4'
for i in range(n):
page = requests.get(url)
page.raise_for_status()
rawPage=page.text
sudokuid = int(re.search(r'\d+', rawPage.split('\n')[20]).group())
soup = BeautifulSoup(rawPage,'html.parser')
sudokuTable = soup.findAll(True, {'class':['s0', 'd0']})
sudoku = [ [(int(item['value']) if item.get('class')[0] == 's0' else 0) for item in sudokuTable][i:i+9] for i in range(0, 81, 9) ]<|fim▁hole|>
filename = 'sudokus/sudoku_%i.txt'%sudokuid
sudokufile = open(filename, 'w')
for line in sudoku:
sudokufile.write( str(line).replace(',',' ').replace('[','').replace(']',' ') + '\n' )
input('Done!')<|fim▁end|>
| |
<|file_name|>NormalVerifier.java<|end_file_name|><|fim▁begin|>package org.python.util.install.driver;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.StringTokenizer;
import org.python.util.install.ChildProcess;
import org.python.util.install.FileHelper;
public class NormalVerifier implements Verifier {
protected static final String AUTOTEST_PY = "autotest.py";
private static final String BIN = "bin";
private static final String JYTHON_UP = "jython up and running!";
private static final String JYTHON = "jython";
private static final String VERIFYING = "verifying";
private File _targetDir;
public void setTargetDir(File targetDir) {
_targetDir = targetDir;
}
public File getTargetDir() {
return _targetDir;
}
public void verify() throws DriverException {
createTestScriptFile(); // create the test .py script
// verify the most simple start of jython works
verifyStart(getSimpleCommand());
}
/**
* Will be overridden in subclass StandaloneVerifier
*
* @return the command array to start jython with
* @throws DriverException
* if there was a problem getting the target directory path
*/
protected String[] getSimpleCommand() throws DriverException {
return new String[] {
Paths.get(BIN).resolve(JYTHON).toString(),
_targetDir.toPath().resolve(AUTOTEST_PY).toString() };
}
/**
* @return The directory where to create the shell script test command in.
*
* @throws DriverException
*/
protected final File getShellScriptTestCommandDir() throws DriverException {
return _targetDir.toPath().resolve(BIN).toFile();
}
/**
* Internal method verifying a jython-starting command by capturing the output
*
* @param command
*
* @throws DriverException
*/
private void verifyStart(String[] command) throws DriverException {
ChildProcess p = new ChildProcess(command);
p.setDebug(true);
p.setCWD(_targetDir.toPath());
System.err.println("Verify start: command=" + Arrays.toString(command) + ", cwd=" + p.getCWD());
int exitValue = p.run();
// if (exitValue != 0) {
// throw new DriverException("start of jython failed\n"
// + "command: " + Arrays.toString(command)
// + "\ncwd: " + p.getCWD()
// + "\nexit value: " + exitValue
// + "\nstdout: " + p.getStdout()
// + "\nstderr: " + p.getStderr());
// }
verifyError(p.getStderr());
verifyOutput(p.getStdout());
}
/**
* Will be overridden in subclass StandaloneVerifier
*
* @return <code>true</code> if the jython start shell script should be verified (using
* different options)
*/
protected boolean doShellScriptTests() {
return true;
}
private void verifyError(List<String> stderr) throws DriverException {
for (String line : stderr) {
if (isExpectedError(line)) {<|fim▁hole|> }
}
}
private boolean isExpectedError(String line) {
boolean expected = false;
if (line.startsWith("*sys-package-mgr*")) {
expected = true;
}
return expected;
}
private void verifyOutput(List<String> stdout) throws DriverException {
boolean started = false;
for (String line : stdout) {
if (isExpectedOutput(line)) {
feedback(line);
if (line.startsWith(JYTHON_UP)) {
started = true;
}
} else {
throw new DriverException(stdout.toString());
}
}
if (!started) {
throw new DriverException("start of jython failed:\n" + stdout.toString());
}
}
private boolean isExpectedOutput(String line) {
boolean expected = false;
if (line.startsWith("[ChildProcess]") || line.startsWith(VERIFYING)) {
expected = true;
} else if (line.startsWith(JYTHON_UP)) {
expected = true;
}
return expected;
}
private String getTestScript() {
StringBuilder b = new StringBuilder(80);
b.append("import sys\n");
b.append("import os\n");
b.append("print '");
b.append(JYTHON_UP);
b.append("'\n");
return b.toString();
}
private void createTestScriptFile() throws DriverException {
File file = new File(getTargetDir(), AUTOTEST_PY);
try {
FileHelper.write(file, getTestScript());
} catch (IOException ioe) {
throw new DriverException(ioe);
}
}
private void feedback(String line) {
System.out.println("feedback " + line);
}
}<|fim▁end|>
|
feedback(line);
} else {
throw new DriverException(stderr.toString());
|
<|file_name|>tut05_derived_objects.py<|end_file_name|><|fim▁begin|>"""
Tutorial - Object inheritance
You are free to derive your request handler classes from any base
class you wish. In most real-world applications, you will probably
want to create a central base class used for all your pages, which takes
care of things like printing a common page header and footer.
"""
import cherrypy
class Page:
# Store the page title in a class attribute
title = 'Untitled Page'
def header(self):
return '''
<html>
<head>
<title>%s</title>
<head>
<body>
<h2>%s</h2>
''' % (self.title, self.title)
def footer(self):
return '''
</body>
</html>
'''
# Note that header and footer don't get their exposed attributes
# set to True. This isn't necessary since the user isn't supposed
# to call header or footer directly; instead, we'll call them from
# within the actually exposed handler methods defined in this
# class' subclasses.
class HomePage(Page):
# Different title for this page
title = 'Tutorial 5'
def __init__(self):
# create a subpage
self.another = AnotherPage()
def index(self):
# Note that we call the header and footer methods inherited
# from the Page class!
return self.header() + '''
<p>
Isn't this exciting? There's
<a href="./another/">another page</a>, too!
</p>
''' + self.footer()
index.exposed = True
class AnotherPage(Page):
title = 'Another Page'
def index(self):
return self.header() + '''
<p><|fim▁hole|>
import os.path
tutconf = os.path.join(os.path.dirname(__file__), 'tutorial.conf')
if __name__ == '__main__':
# CherryPy always starts with app.root when trying to map request URIs
# to objects, so we need to mount a request handler root. A request
# to '/' will be mapped to HelloWorld().index().
cherrypy.quickstart(HomePage(), config=tutconf)
else:
# This branch is for the test suite; you can ignore it.
cherrypy.tree.mount(HomePage(), config=tutconf)<|fim▁end|>
|
And this is the amazing second page!
</p>
''' + self.footer()
index.exposed = True
|
<|file_name|>Protocol1_12_2To1_12_1.java<|end_file_name|><|fim▁begin|>/*
* This file is part of ViaVersion - https://github.com/ViaVersion/ViaVersion
* Copyright (C) 2016-2021 ViaVersion and contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.viaversion.viaversion.protocols.protocol1_12_2to1_12_1;
import com.viaversion.viaversion.api.protocol.AbstractProtocol;
import com.viaversion.viaversion.api.protocol.remapper.PacketRemapper;
import com.viaversion.viaversion.api.type.Type;
import com.viaversion.viaversion.protocols.protocol1_12_1to1_12.ClientboundPackets1_12_1;
import com.viaversion.viaversion.protocols.protocol1_12_1to1_12.ServerboundPackets1_12_1;
public class Protocol1_12_2To1_12_1 extends AbstractProtocol<ClientboundPackets1_12_1, ClientboundPackets1_12_1, ServerboundPackets1_12_1, ServerboundPackets1_12_1> {
public Protocol1_12_2To1_12_1() {
super(ClientboundPackets1_12_1.class, ClientboundPackets1_12_1.class, ServerboundPackets1_12_1.class, ServerboundPackets1_12_1.class);
}
@Override<|fim▁hole|> registerClientbound(ClientboundPackets1_12_1.KEEP_ALIVE, new PacketRemapper() {
@Override
public void registerMap() {
map(Type.VAR_INT, Type.LONG);
}
});
registerServerbound(ServerboundPackets1_12_1.KEEP_ALIVE, new PacketRemapper() {
@Override
public void registerMap() {
map(Type.LONG, Type.VAR_INT);
}
});
}
}<|fim▁end|>
|
protected void registerPackets() {
|
<|file_name|>index.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>export * from './booklike.component';<|fim▁end|>
| |
<|file_name|>connection_wrapper.py<|end_file_name|><|fim▁begin|>import logging
import time
from django.conf import settings
from django.core.mail import get_connection as dj_get_connection
logger = logging.getLogger(__name__)
class BackendWrapper(object):
"""A wrapper around Django's Email Backend, providing hooks
for instrumentation and testing.
"""
def __init__(self, backend):<|fim▁hole|> logger.info("initialized connection wrapper with email backend: %s", backend)
def send_messages(self, email_messages):
# check settings hook for rewriting email recipient, act accordingly
if settings.EMAIL_REWRITE_RECIPIENT:
for message in email_messages:
message.to = [settings.EMAIL_REWRITE_RECIPIENT]
# send the messages
t = time.time()
msg_count = self._backend.send_messages(email_messages)
elapsed = time.time() - t
if msg_count > 0:
logger.info('sent %s messages, elapsed: %.3fs' % (msg_count, elapsed))
for msg in email_messages:
hdrs = dict((k, v) for k, v in dict(msg.message()).iteritems()
if k.lower() not in ('date', 'from', 'subject', 'content-type', 'mime-version'))
logger.info("sent email: {}".format(repr(hdrs)))
if msg_count != len(email_messages):
logger.warn('send_messages() was called with %s messages but return value was %s',
len(email_messages), msg_count)
return msg_count
def close(self):
# never raise Exceptions on close().
try:
self._backend.close()
except Exception as e:
logger.debug("self._backend.close() failed: %s", e)
def __getattr__(self, a):
return getattr(self._backend, a)
def get_connection(*a, **kw):
return BackendWrapper(dj_get_connection(*a, **kw))<|fim▁end|>
|
self._backend = backend
|
<|file_name|>shared-tests-for-multi-geometry-views.js<|end_file_name|><|fim▁begin|>var _ = require('underscore');
module.exports = function () {
beforeEach(function () {
spyOn(_, 'debounce').and.callFake(function (func) { return function () { func.apply(this, arguments); }; });
this.geometryView.render();
});
describe('when the model is removed', function () {
it('should remove each geometry', function () {
this.geometry.geometries.each(function (polygon) {
spyOn(polygon, 'remove');
});
this.geometry.remove();
expect(this.geometry.geometries.all(function (geometry) {
return geometry.remove.calls.count() === 1;
})).toBe(true);
});
it('should remove the view', function () {
spyOn(this.geometryView, 'remove');<|fim▁hole|> expect(this.geometryView.remove).toHaveBeenCalled();
});
});
};<|fim▁end|>
|
this.geometry.remove();
|
<|file_name|>formatter.rs<|end_file_name|><|fim▁begin|>// (c) 2016 Productize SPRL <[email protected]>
use std::io;
use Sexp;
use symbolic_expressions::{Formatter, SexpError};
// custom symbolic_expressions formatter that aims to be
// kicad compatible
struct Indent {
newline_before: i64,
closing_on_new_line: bool,
newline_after: i64,
}
impl Default for Indent {
fn default() -> Indent {
Indent {
newline_before: 0,
closing_on_new_line: false,
newline_after: 0,
}
}
}
impl Indent {
fn before(&mut self) {
self.newline_before = 1;
}
fn close_on_new_line(&mut self) {
self.closing_on_new_line = true;
}
fn before_double(&mut self) {
self.newline_before = 2;
}
fn newline_after_closing(&mut self) {
self.newline_after = 1;
}
}
pub struct KicadFormatter {<|fim▁hole|> ind: Vec<u8>,
pts_xy_count: i64,
seen_module: bool,
seen_segment: bool,
}
impl KicadFormatter {
pub fn new(initial_indent_level: i64) -> KicadFormatter {
KicadFormatter {
indent: initial_indent_level,
stack: vec![],
ind: vec![b' ', b' '], // two spaces
pts_xy_count: 0,
seen_module: false,
seen_segment: false,
}
}
fn is(&self, what: &'static str) -> bool {
for x in &self.stack {
if let Some((ref name, _)) = *x {
if name == what {
return true;
}
}
}
false
}
fn parent_is(&self, what: &'static str) -> bool {
if let Some(s) = self.stack.last() {
if let Some((ref t, _)) = *s {
return t == what;
}
}
false
}
fn indent<W: io::Write>(&self, writer: &mut W, nls: i64) -> Result<(), SexpError> {
for _ in 0..nls {
writer.write_all(b"\n")?;
}
for _ in 0..self.indent {
writer.write_all(&self.ind)?;
}
Ok(())
}
fn indent_plus<W: io::Write>(&mut self, writer: &mut W, nls: i64) -> Result<(),SexpError> {
self.indent += 1;
let res = self.indent(writer, nls);
self.indent -= 1;
res
}
fn want_indent_fp_lib_table(&self, _ele: &str) -> Option<Indent> {
if !self.is("fp_lib_table") {
return None;
}
let mut indent = Indent::default();
if self.parent_is("fp_lib_table") {
indent.before();
}
Some(indent)
}
fn want_indent_module(&self, ele: &str) -> Option<Indent> {
// if !self.is("module") {
// return None
// }
let mut indent = Indent::default();
indent.before();
if self.parent_is("module") {
match ele {
"at" | "descr" | "fp_line" | "fp_poly" | "pad" | "path" | "fp_circle" | "attr" => {
return Some(indent)
}
"model" | "fp_text" | "gr_text" => {
indent.close_on_new_line();
return Some(indent);
}
_ => (),
}
}
if self.parent_is("dimension") {
if let "gr_text" = ele {
indent.close_on_new_line();
return Some(indent);
}
}
if self.parent_is("fp_text") || self.parent_is("gr_text") {
if let "effects" = ele {
return Some(indent);
}
}
if self.parent_is("pts") {
if let "xy" = ele {
let wrap = if self.is("fp_poly") { 4 } else { 5 };
if self.pts_xy_count > 0 && self.pts_xy_count % wrap == 0 {
return Some(indent);
} else if self.pts_xy_count == 0
&& (self.is("polygon") || self.is("filled_polygon"))
{
return Some(indent);
}
}
}
if self.parent_is("model") {
match ele {
"at" | "scale" | "rotate" => return Some(indent),
_ => (),
}
}
if self.parent_is("pad") {
if let "net" = ele {
return Some(indent);
}
}
None
}
fn want_indent_layout(&self, ele: &str) -> Option<Indent> {
if !self.is("kicad_pcb") {
return None;
}
let mut indent = Indent::default();
indent.before();
if self.parent_is("kicad_pcb") {
match ele {
"page" => {
indent.before_double();
return Some(indent);
}
"net" | "gr_circle" | "gr_line" | "gr_arc" | "segment" | "via" => {
return Some(indent)
}
"layers" | "gr_text" | "dimension" | "zone" => {
indent.close_on_new_line();
return Some(indent);
}
"setup" => {
indent.before_double();
indent.close_on_new_line();
indent.newline_after_closing();
return Some(indent);
}
"general" | "net_class" | "module" => {
indent.before_double();
indent.close_on_new_line();
return Some(indent);
}
_ => (),
}
}
if self.parent_is("general") {
return Some(indent);
}
if self.parent_is("layers") {
return Some(indent);
}
if self.parent_is("setup") {
return Some(indent);
}
if self.parent_is("pcbplotparams") {
return Some(indent);
}
if self.parent_is("net_class") {
return Some(indent);
}
if self.parent_is("dimension") {
match ele {
"gr_text" |
"feature1" |
"feature2" |
"crossbar" |
"arrow1a" |
"arrow1b" |
"arrow2a" |
"arrow2b" => return Some(indent),
_ => (),
}
}
if self.parent_is("zone") {
match ele {
"connect_pads" | "min_thickness" | "fill" | "keepout" | "priority" => {
return Some(indent)
}
"polygon" | "filled_polygon" => {
indent.close_on_new_line();
return Some(indent);
}
_ => (),
}
}
if self.parent_is("polygon") | self.parent_is("filled_polygon") {
indent.close_on_new_line();
return Some(indent);
}
None
}
fn want_indent(&self, value: &Sexp) -> Option<Indent> {
let first = match *value {
Sexp::List(ref l) => {
if l.is_empty() {
return None;
}
(&l[0]).clone()
}
Sexp::Empty => return None,
Sexp::String(ref l) => Sexp::String(l.clone()),
};
if let Sexp::String(ref ele) = first {
let i = self.want_indent_module(ele);
if i.is_some() {
return i;
}
let i = self.want_indent_layout(ele);
if i.is_some() {
return i;
}
let i = self.want_indent_fp_lib_table(ele);
if i.is_some() {
return i;
}
}
None
}
}
impl Formatter for KicadFormatter {
fn open<W>(&mut self, writer: &mut W, value: Option<&Sexp>) -> Result<(), SexpError>
where
W: io::Write,
{
let mut ele = String::new();
// if first element is string
if let Some(sexp) = value {
if let Sexp::String(ref s) = *sexp {
ele.push_str(s);
}
}
// special handling: ugly :(
// write an extra newline before the first element after the last module
if let "module" = &ele[..] {
self.seen_module = true;
} else if self.parent_is("kicad_pcb") && self.seen_module {
self.seen_module = false;
self.indent(writer, 1)?;
}
// write an extra newline before the first segment
if !self.seen_segment {
if let "segment" = &ele[..] {
self.seen_segment = true;
self.indent(writer, 1)?;
}
}
let exp = Sexp::String(ele.clone());
let want_indent = self.want_indent(&exp);
if let Some(ref want_indent) = want_indent {
self.indent += 1;
if want_indent.newline_before > 0 {
self.indent(writer, want_indent.newline_before)?;
}
}
// special handling for breaking of xy elements
if let "pts" = &ele[..] {
self.pts_xy_count = 0;
}
if self.parent_is("pts") {
if let "xy" = &ele[..] {
self.pts_xy_count += 1;
// if self.pts_xy_count == 5 {
// self.pts_xy_count = 1;
// }
}
}
if !ele.is_empty() {
self.stack.push(Some((ele, want_indent)))
} else {
self.stack.push(None)
}
writer.write_all(b"(").map_err(From::from)
}
fn element<W>(&mut self, writer: &mut W, value: &Sexp) -> Result<(), SexpError>
where
W: io::Write,
{
// get rid of the space if we will be putting a newline next
if self.want_indent(value).is_none() {
writer.write_all(b" ")?;
} else if let Sexp::String(_) = *value {
writer.write_all(b" ")?;
}
Ok(())
}
fn close<W>(&mut self, writer: &mut W) -> Result<(), SexpError>
where
W: io::Write,
{
if let Some(Some((s, want_indent))) = self.stack.pop() {
if let Some(indent) = want_indent {
self.indent -= 1;
if indent.closing_on_new_line {
self.indent_plus(writer, 1)?;
}
// special handling of toplevel module...
// which doesn't work, because it is not indented
if &s == "module" && self.stack.is_empty() {
writer.write_all(b"\n")?;
}
writer.write_all(b")")?;
for _ in 0..indent.newline_after {
writer.write_all(b"\n")?;
}
return Ok(());
} else if self.stack.is_empty()
&& (&s == "module" || &s == "kicad_pcb" || &s == "fp_lib_table")
{
writer.write_all(b"\n")?;
}
}
writer.write_all(b")")?;
Ok(())
}
}<|fim▁end|>
|
indent: i64,
stack: Vec<Option<(String, Option<Indent>)>>,
|
<|file_name|>livestream.py<|end_file_name|><|fim▁begin|>import logging
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream import HLSStream
from streamlink.utils.parse import parse_json
log = logging.getLogger(__name__)
@pluginmatcher(re.compile(
r"https?://(?:www\.)?livestream\.com/"<|fim▁hole|> _stream_config_schema = validate.Schema(validate.any({
"event": {
"stream_info": validate.any({
"is_live": bool,
"secure_m3u8_url": validate.url(scheme="https"),
}, None),
}
}, {}), validate.get("event", {}), validate.get("stream_info", {}))
def _get_streams(self):
res = self.session.http.get(self.url)
m = self._config_re.search(res.text)
if not m:
log.debug("Unable to find _config_re")
return
stream_info = parse_json(m.group(1), "config JSON",
schema=self._stream_config_schema)
log.trace("stream_info: {0!r}".format(stream_info))
if not (stream_info and stream_info["is_live"]):
log.debug("Stream might be Off Air")
return
m3u8_url = stream_info.get("secure_m3u8_url")
if m3u8_url:
yield from HLSStream.parse_variant_playlist(self.session, m3u8_url).items()
__plugin__ = Livestream<|fim▁end|>
|
))
class Livestream(Plugin):
_config_re = re.compile(r"window.config = ({.+})")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.